V8 API Reference, 7.2.502.16 (for Deno 0.2.4)
factory.cc
1 // Copyright 2014 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
4 
5 #include "src/heap/factory.h"
6 
7 #include "src/accessors.h"
8 #include "src/allocation-site-scopes.h"
9 #include "src/ast/ast-source-ranges.h"
10 #include "src/ast/ast.h"
11 #include "src/base/bits.h"
12 #include "src/bootstrapper.h"
13 #include "src/builtins/constants-table-builder.h"
14 #include "src/compiler.h"
15 #include "src/conversions.h"
16 #include "src/counters.h"
17 #include "src/interpreter/interpreter.h"
18 #include "src/isolate-inl.h"
19 #include "src/macro-assembler.h"
20 #include "src/objects/api-callbacks.h"
21 #include "src/objects/arguments-inl.h"
22 #include "src/objects/bigint.h"
23 #include "src/objects/debug-objects-inl.h"
24 #include "src/objects/embedder-data-array-inl.h"
25 #include "src/objects/frame-array-inl.h"
26 #include "src/objects/instance-type-inl.h"
27 #include "src/objects/js-array-inl.h"
28 #include "src/objects/js-collection-inl.h"
29 #include "src/objects/js-generator-inl.h"
30 #include "src/objects/js-regexp-inl.h"
31 #include "src/objects/js-weak-refs-inl.h"
32 #include "src/objects/literal-objects-inl.h"
33 #include "src/objects/microtask-inl.h"
34 #include "src/objects/module-inl.h"
35 #include "src/objects/promise-inl.h"
36 #include "src/objects/scope-info.h"
37 #include "src/objects/stack-frame-info-inl.h"
38 #include "src/unicode-cache.h"
39 #include "src/unicode-decoder.h"
40 
41 namespace v8 {
42 namespace internal {
43 
44 namespace {
45 
46 int ComputeCodeObjectSize(const CodeDesc& desc) {
47  bool has_unwinding_info = desc.unwinding_info != nullptr;
48  DCHECK((has_unwinding_info && desc.unwinding_info_size > 0) ||
49  (!has_unwinding_info && desc.unwinding_info_size == 0));
50  int body_size = desc.instr_size;
51  int unwinding_info_size_field_size = kInt64Size;
52  if (has_unwinding_info) {
53  body_size = RoundUp(body_size, kInt64Size) + desc.unwinding_info_size +
54  unwinding_info_size_field_size;
55  }
56  int object_size = Code::SizeFor(RoundUp(body_size, kObjectAlignment));
57  DCHECK(IsAligned(static_cast<intptr_t>(object_size), kCodeAlignment));
58  return object_size;
59 }
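
The size computation above is pure alignment arithmetic: the instruction body is padded to 8 bytes before the unwinding info (plus the 64-bit field that records its size) is appended, and the result is rounded up to the object alignment before Code::SizeFor adds the header. A minimal standalone sketch of that arithmetic, using illustrative stand-ins for V8's kInt64Size and kObjectAlignment and leaving the header out:

  namespace demo {

  constexpr int kInt64SizeDemo = 8;
  constexpr int kObjectAlignmentDemo = 8;

  constexpr int RoundUpDemo(int value, int alignment) {
    // Valid for power-of-two alignments only.
    return (value + alignment - 1) & ~(alignment - 1);
  }

  constexpr int CodeBodySizeDemo(int instr_size, int unwinding_info_size) {
    int body = instr_size;
    if (unwinding_info_size > 0) {
      // Align the instructions, then append the unwinding info and the
      // trailing field that stores its size.
      body = RoundUpDemo(body, kInt64SizeDemo) + unwinding_info_size +
             kInt64SizeDemo;
    }
    return RoundUpDemo(body, kObjectAlignmentDemo);
  }

  static_assert(CodeBodySizeDemo(100, 0) == 104, "100 rounds up to 104");
  static_assert(CodeBodySizeDemo(100, 16) == 128, "104 + 16 + 8 = 128");

  }  // namespace demo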
60 
61 void InitializeCode(Heap* heap, Handle<Code> code, int object_size,
62  const CodeDesc& desc, Code::Kind kind,
63  Handle<Object> self_ref, int32_t builtin_index,
64  Handle<ByteArray> source_position_table,
65  Handle<DeoptimizationData> deopt_data,
66  Handle<ByteArray> reloc_info,
67  Handle<CodeDataContainer> data_container, uint32_t stub_key,
68  bool is_turbofanned, int stack_slots,
69  int safepoint_table_offset, int handler_table_offset) {
70  DCHECK(IsAligned(code->address(), kCodeAlignment));
71  DCHECK_IMPLIES(
72  !heap->memory_allocator()->code_range().is_empty(),
73  heap->memory_allocator()->code_range().contains(code->address()));
74 
75  bool has_unwinding_info = desc.unwinding_info != nullptr;
76 
77  code->set_raw_instruction_size(desc.instr_size);
78  code->set_relocation_info(*reloc_info);
79  const bool is_off_heap_trampoline = false;
80  code->initialize_flags(kind, has_unwinding_info, is_turbofanned, stack_slots,
81  is_off_heap_trampoline);
82  code->set_safepoint_table_offset(safepoint_table_offset);
83  code->set_handler_table_offset(handler_table_offset);
84  code->set_code_data_container(*data_container);
85  code->set_deoptimization_data(*deopt_data);
86  code->set_stub_key(stub_key);
87  code->set_source_position_table(*source_position_table);
88  code->set_constant_pool_offset(desc.instr_size - desc.constant_pool_size);
89  code->set_builtin_index(builtin_index);
90 
91  // Allow self references to the created code object by patching the handle
92  // to point to the newly allocated Code object.
93  if (!self_ref.is_null()) {
94  DCHECK(self_ref->IsOddball());
95  DCHECK(Oddball::cast(*self_ref)->kind() == Oddball::kSelfReferenceMarker);
96  if (FLAG_embedded_builtins) {
97  auto builder = heap->isolate()->builtins_constants_table_builder();
98  if (builder != nullptr) builder->PatchSelfReference(self_ref, code);
99  }
100  *(self_ref.location()) = code->ptr();
101  }
102 
103  // Migrate generated code.
104  // The generated code can contain Object** values (typically from handles)
105  // that are dereferenced during the copy to point directly to the actual heap
106  // objects. These pointers can include references to the code object itself,
107  // through the self_reference parameter.
108  code->CopyFromNoFlush(heap, desc);
109 
110  code->clear_padding();
111 
112 #ifdef VERIFY_HEAP
113  if (FLAG_verify_heap) code->ObjectVerify(heap->isolate());
114 #endif
115 }
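
The self-reference logic above solves a bootstrapping problem: the generated code may need to refer to its own Code object, which does not exist while the instructions are being emitted, so a marker oddball is emitted first and the handle is patched once the real object is allocated. A conceptual standalone sketch of that two-phase scheme (all names below are illustrative, not V8's):

  #include <vector>

  namespace demo {

  struct EmittedCode {
    const void* self_marker;         // placeholder written during emission
    std::vector<const void*> slots;  // slots meant to point at the final object
  };

  // After allocation, rewrite every slot that still holds the placeholder.
  void PatchSelfReferences(EmittedCode* code, const void* final_object) {
    for (const void*& slot : code->slots) {
      if (slot == code->self_marker) slot = final_object;
    }
  }

  }  // namespace demo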
116 
117 } // namespace
118 
119 HeapObject* Factory::AllocateRawWithImmortalMap(int size,
120  PretenureFlag pretenure,
121  Map map,
122  AllocationAlignment alignment) {
123  HeapObject* result = isolate()->heap()->AllocateRawWithRetryOrFail(
124  size, Heap::SelectSpace(pretenure), alignment);
125  result->set_map_after_allocation(map, SKIP_WRITE_BARRIER);
126  return result;
127 }
128 
129 HeapObject* Factory::AllocateRawWithAllocationSite(
130  Handle<Map> map, PretenureFlag pretenure,
131  Handle<AllocationSite> allocation_site) {
132  DCHECK(map->instance_type() != MAP_TYPE);
133  int size = map->instance_size();
134  if (!allocation_site.is_null()) size += AllocationMemento::kSize;
135  AllocationSpace space = Heap::SelectSpace(pretenure);
136  HeapObject* result =
137  isolate()->heap()->AllocateRawWithRetryOrFail(size, space);
138  WriteBarrierMode write_barrier_mode =
139  space == NEW_SPACE ? SKIP_WRITE_BARRIER : UPDATE_WRITE_BARRIER;
140  result->set_map_after_allocation(*map, write_barrier_mode);
141  if (!allocation_site.is_null()) {
142  AllocationMemento* alloc_memento = reinterpret_cast<AllocationMemento*>(
143  reinterpret_cast<Address>(result) + map->instance_size());
144  InitializeAllocationMemento(alloc_memento, *allocation_site);
145  }
146  return result;
147 }
148 
149 void Factory::InitializeAllocationMemento(AllocationMemento* memento,
150  AllocationSite* allocation_site) {
151  memento->set_map_after_allocation(*allocation_memento_map(),
152  SKIP_WRITE_BARRIER);
153  memento->set_allocation_site(allocation_site, SKIP_WRITE_BARRIER);
154  if (FLAG_allocation_site_pretenuring) {
155  allocation_site->IncrementMementoCreateCount();
156  }
157 }
158 
159 HeapObject* Factory::AllocateRawArray(int size, PretenureFlag pretenure) {
160  AllocationSpace space = Heap::SelectSpace(pretenure);
161  HeapObject* result =
162  isolate()->heap()->AllocateRawWithRetryOrFail(size, space);
163  if (size > kMaxRegularHeapObjectSize && FLAG_use_marking_progress_bar) {
164  MemoryChunk* chunk = MemoryChunk::FromAddress(result->address());
165  chunk->SetFlag<AccessMode::ATOMIC>(MemoryChunk::HAS_PROGRESS_BAR);
166  }
167  return result;
168 }
169 
170 HeapObject* Factory::AllocateRawFixedArray(int length,
171  PretenureFlag pretenure) {
172  if (length < 0 || length > FixedArray::kMaxLength) {
173  isolate()->heap()->FatalProcessOutOfMemory("invalid array length");
174  }
175  return AllocateRawArray(FixedArray::SizeFor(length), pretenure);
176 }
177 
178 HeapObject* Factory::AllocateRawWeakArrayList(int capacity,
179  PretenureFlag pretenure) {
180  if (capacity < 0 || capacity > WeakArrayList::kMaxCapacity) {
181  isolate()->heap()->FatalProcessOutOfMemory("invalid array length");
182  }
183  return AllocateRawArray(WeakArrayList::SizeForCapacity(capacity), pretenure);
184 }
185 
186 HeapObject* Factory::New(Handle<Map> map, PretenureFlag pretenure) {
187  DCHECK(map->instance_type() != MAP_TYPE);
188  int size = map->instance_size();
189  AllocationSpace space = Heap::SelectSpace(pretenure);
190  HeapObject* result =
191  isolate()->heap()->AllocateRawWithRetryOrFail(size, space);
192  // New space objects are allocated white.
193  WriteBarrierMode write_barrier_mode =
194  space == NEW_SPACE ? SKIP_WRITE_BARRIER : UPDATE_WRITE_BARRIER;
195  result->set_map_after_allocation(*map, write_barrier_mode);
196  return result;
197 }
198 
199 Handle<HeapObject> Factory::NewFillerObject(int size, bool double_align,
200  AllocationSpace space) {
201  AllocationAlignment alignment = double_align ? kDoubleAligned : kWordAligned;
202  Heap* heap = isolate()->heap();
203  HeapObject* result = heap->AllocateRawWithRetryOrFail(size, space, alignment);
204 #ifdef DEBUG
205  MemoryChunk* chunk = MemoryChunk::FromAddress(result->address());
206  DCHECK(chunk->owner()->identity() == space);
207 #endif
208  heap->CreateFillerObjectAt(result->address(), size, ClearRecordedSlots::kNo);
209  return Handle<HeapObject>(result, isolate());
210 }
211 
212 Handle<PrototypeInfo> Factory::NewPrototypeInfo() {
213  Handle<PrototypeInfo> result =
214  Handle<PrototypeInfo>::cast(NewStruct(PROTOTYPE_INFO_TYPE, TENURED));
215  result->set_prototype_users(Smi::kZero);
216  result->set_registry_slot(PrototypeInfo::UNREGISTERED);
217  result->set_bit_field(0);
218  result->set_module_namespace(*undefined_value());
219  return result;
220 }
221 
222 Handle<EnumCache> Factory::NewEnumCache(Handle<FixedArray> keys,
223  Handle<FixedArray> indices) {
224  return Handle<EnumCache>::cast(NewTuple2(keys, indices, TENURED));
225 }
226 
227 Handle<Tuple2> Factory::NewTuple2(Handle<Object> value1, Handle<Object> value2,
228  PretenureFlag pretenure) {
229  Handle<Tuple2> result =
230  Handle<Tuple2>::cast(NewStruct(TUPLE2_TYPE, pretenure));
231  result->set_value1(*value1);
232  result->set_value2(*value2);
233  return result;
234 }
235 
236 Handle<Tuple3> Factory::NewTuple3(Handle<Object> value1, Handle<Object> value2,
237  Handle<Object> value3,
238  PretenureFlag pretenure) {
239  Handle<Tuple3> result =
240  Handle<Tuple3>::cast(NewStruct(TUPLE3_TYPE, pretenure));
241  result->set_value1(*value1);
242  result->set_value2(*value2);
243  result->set_value3(*value3);
244  return result;
245 }
246 
247 Handle<ArrayBoilerplateDescription> Factory::NewArrayBoilerplateDescription(
248  ElementsKind elements_kind, Handle<FixedArrayBase> constant_values) {
249  Handle<ArrayBoilerplateDescription> result =
250  Handle<ArrayBoilerplateDescription>::cast(
251  NewStruct(ARRAY_BOILERPLATE_DESCRIPTION_TYPE, TENURED));
252  result->set_elements_kind(elements_kind);
253  result->set_constant_elements(*constant_values);
254  return result;
255 }
256 
257 Handle<TemplateObjectDescription> Factory::NewTemplateObjectDescription(
258  Handle<FixedArray> raw_strings, Handle<FixedArray> cooked_strings) {
259  DCHECK_EQ(raw_strings->length(), cooked_strings->length());
260  DCHECK_LT(0, raw_strings->length());
261  Handle<TemplateObjectDescription> result =
262  Handle<TemplateObjectDescription>::cast(NewStruct(TUPLE2_TYPE, TENURED));
263  result->set_raw_strings(*raw_strings);
264  result->set_cooked_strings(*cooked_strings);
265  return result;
266 }
267 
268 Handle<Oddball> Factory::NewOddball(Handle<Map> map, const char* to_string,
269  Handle<Object> to_number,
270  const char* type_of, byte kind,
271  PretenureFlag pretenure) {
272  Handle<Oddball> oddball(Oddball::cast(New(map, pretenure)), isolate());
273  Oddball::Initialize(isolate(), oddball, to_string, to_number, type_of, kind);
274  return oddball;
275 }
276 
277 Handle<Oddball> Factory::NewSelfReferenceMarker(PretenureFlag pretenure) {
278  return NewOddball(self_reference_marker_map(), "self_reference_marker",
279  handle(Smi::FromInt(-1), isolate()), "undefined",
280  Oddball::kSelfReferenceMarker, pretenure);
281 }
282 
283 Handle<PropertyArray> Factory::NewPropertyArray(int length,
284  PretenureFlag pretenure) {
285  DCHECK_LE(0, length);
286  if (length == 0) return empty_property_array();
287  HeapObject* result = AllocateRawFixedArray(length, pretenure);
288  result->set_map_after_allocation(*property_array_map(), SKIP_WRITE_BARRIER);
289  Handle<PropertyArray> array(PropertyArray::cast(result), isolate());
290  array->initialize_length(length);
291  MemsetPointer(array->data_start(), *undefined_value(), length);
292  return array;
293 }
294 
295 Handle<FixedArray> Factory::NewFixedArrayWithFiller(RootIndex map_root_index,
296  int length, Object* filler,
297  PretenureFlag pretenure) {
298  HeapObject* result = AllocateRawFixedArray(length, pretenure);
299  DCHECK(RootsTable::IsImmortalImmovable(map_root_index));
300  Map map = Map::cast(isolate()->root(map_root_index));
301  result->set_map_after_allocation(map, SKIP_WRITE_BARRIER);
302  Handle<FixedArray> array(FixedArray::cast(result), isolate());
303  array->set_length(length);
304  MemsetPointer(array->data_start(), filler, length);
305  return array;
306 }
307 
308 template <typename T>
309 Handle<T> Factory::NewFixedArrayWithMap(RootIndex map_root_index, int length,
310  PretenureFlag pretenure) {
311  static_assert(std::is_base_of<FixedArray, T>::value,
312  "T must be a descendant of FixedArray");
313  // Zero-length case must be handled outside, where the knowledge about
314  // the map is available.
315  DCHECK_LT(0, length);
316  return Handle<T>::cast(NewFixedArrayWithFiller(
317  map_root_index, length, *undefined_value(), pretenure));
318 }
319 
320 template <typename T>
321 Handle<T> Factory::NewWeakFixedArrayWithMap(RootIndex map_root_index,
322  int length,
323  PretenureFlag pretenure) {
324  static_assert(std::is_base_of<WeakFixedArray, T>::value,
325  "T must be a descendant of WeakFixedArray");
326 
327  // Zero-length case must be handled outside.
328  DCHECK_LT(0, length);
329 
330  HeapObject* result =
331  AllocateRawArray(WeakFixedArray::SizeFor(length), pretenure);
332  Map map = Map::cast(isolate()->root(map_root_index));
333  result->set_map_after_allocation(map, SKIP_WRITE_BARRIER);
334 
335  Handle<WeakFixedArray> array(WeakFixedArray::cast(result), isolate());
336  array->set_length(length);
337  MemsetPointer(ObjectSlot(array->data_start()), *undefined_value(), length);
338 
339  return Handle<T>::cast(array);
340 }
341 
342 template Handle<FixedArray> Factory::NewFixedArrayWithMap<FixedArray>(
343  RootIndex, int, PretenureFlag);
344 
345 Handle<FixedArray> Factory::NewFixedArray(int length, PretenureFlag pretenure) {
346  DCHECK_LE(0, length);
347  if (length == 0) return empty_fixed_array();
348  return NewFixedArrayWithFiller(RootIndex::kFixedArrayMap, length,
349  *undefined_value(), pretenure);
350 }
351 
352 Handle<WeakFixedArray> Factory::NewWeakFixedArray(int length,
353  PretenureFlag pretenure) {
354  DCHECK_LE(0, length);
355  if (length == 0) return empty_weak_fixed_array();
356  HeapObject* result =
357  AllocateRawArray(WeakFixedArray::SizeFor(length), pretenure);
358  DCHECK(RootsTable::IsImmortalImmovable(RootIndex::kWeakFixedArrayMap));
359  result->set_map_after_allocation(*weak_fixed_array_map(), SKIP_WRITE_BARRIER);
360  Handle<WeakFixedArray> array(WeakFixedArray::cast(result), isolate());
361  array->set_length(length);
362  MemsetPointer(ObjectSlot(array->data_start()), *undefined_value(), length);
363  return array;
364 }
365 
366 MaybeHandle<FixedArray> Factory::TryNewFixedArray(int length,
367  PretenureFlag pretenure) {
368  DCHECK_LE(0, length);
369  if (length == 0) return empty_fixed_array();
370 
371  int size = FixedArray::SizeFor(length);
372  AllocationSpace space = Heap::SelectSpace(pretenure);
373  Heap* heap = isolate()->heap();
374  AllocationResult allocation = heap->AllocateRaw(size, space);
375  HeapObject* result = nullptr;
376  if (!allocation.To(&result)) return MaybeHandle<FixedArray>();
377  if (size > kMaxRegularHeapObjectSize && FLAG_use_marking_progress_bar) {
378  MemoryChunk* chunk = MemoryChunk::FromAddress(result->address());
379  chunk->SetFlag<AccessMode::ATOMIC>(MemoryChunk::HAS_PROGRESS_BAR);
380  }
381  result->set_map_after_allocation(*fixed_array_map(), SKIP_WRITE_BARRIER);
382  Handle<FixedArray> array(FixedArray::cast(result), isolate());
383  array->set_length(length);
384  MemsetPointer(array->data_start(), ReadOnlyRoots(heap).undefined_value(),
385  length);
386  return array;
387 }
388 
389 Handle<FixedArray> Factory::NewFixedArrayWithHoles(int length,
390  PretenureFlag pretenure) {
391  DCHECK_LE(0, length);
392  if (length == 0) return empty_fixed_array();
393  return NewFixedArrayWithFiller(RootIndex::kFixedArrayMap, length,
394  *the_hole_value(), pretenure);
395 }
396 
397 Handle<FixedArray> Factory::NewUninitializedFixedArray(
398  int length, PretenureFlag pretenure) {
399  DCHECK_LE(0, length);
400  if (length == 0) return empty_fixed_array();
401 
402  // TODO(ulan): As an experiment this temporarily returns an initialized fixed
403  // array. After getting canary/performance coverage, either remove the
404  // function or revert to returning uninitialized array.
405  return NewFixedArrayWithFiller(RootIndex::kFixedArrayMap, length,
406  *undefined_value(), pretenure);
407 }
408 
409 Handle<FeedbackVector> Factory::NewFeedbackVector(
410  Handle<SharedFunctionInfo> shared, PretenureFlag pretenure) {
411  int length = shared->feedback_metadata()->slot_count();
412  DCHECK_LE(0, length);
413  int size = FeedbackVector::SizeFor(length);
414 
415  HeapObject* result =
416  AllocateRawWithImmortalMap(size, pretenure, *feedback_vector_map());
417  Handle<FeedbackVector> vector(FeedbackVector::cast(result), isolate());
418  vector->set_shared_function_info(*shared);
419  vector->set_optimized_code_weak_or_smi(MaybeObject::FromSmi(Smi::FromEnum(
420  FLAG_log_function_events ? OptimizationMarker::kLogFirstExecution
421  : OptimizationMarker::kNone)));
422  vector->set_length(length);
423  vector->set_invocation_count(0);
424  vector->set_profiler_ticks(0);
425  vector->set_deopt_count(0);
426  // TODO(leszeks): Initialize based on the feedback metadata.
427  MemsetPointer(ObjectSlot(vector->slots_start()), *undefined_value(), length);
428  return vector;
429 }
430 
431 Handle<EmbedderDataArray> Factory::NewEmbedderDataArray(
432  int length, PretenureFlag pretenure) {
433  DCHECK_LE(0, length);
434  int size = EmbedderDataArray::SizeFor(length);
435 
436  HeapObject* result =
437  AllocateRawWithImmortalMap(size, pretenure, *embedder_data_array_map());
438  Handle<EmbedderDataArray> array(EmbedderDataArray::cast(result), isolate());
439  array->set_length(length);
440 
441  if (length > 0) {
442  ObjectSlot start(array->slots_start());
443  ObjectSlot end(array->slots_end());
444  size_t slot_count = end - start;
445  MemsetPointer(start, *undefined_value(), slot_count);
446  }
447  return array;
448 }
449 
450 Handle<ObjectBoilerplateDescription> Factory::NewObjectBoilerplateDescription(
451  int boilerplate, int all_properties, int index_keys, bool has_seen_proto) {
452  DCHECK_GE(boilerplate, 0);
453  DCHECK_GE(all_properties, index_keys);
454  DCHECK_GE(index_keys, 0);
455 
456  int backing_store_size =
457  all_properties - index_keys - (has_seen_proto ? 1 : 0);
458  DCHECK_GE(backing_store_size, 0);
459  bool has_different_size_backing_store = boilerplate != backing_store_size;
460 
461  // Space for name and value for every boilerplate property + LiteralType flag.
462  int size =
463  2 * boilerplate + ObjectBoilerplateDescription::kDescriptionStartIndex;
464 
465  if (has_different_size_backing_store) {
466  // An extra entry for the backing store size.
467  size++;
468  }
469 
470  Handle<ObjectBoilerplateDescription> description =
471  Handle<ObjectBoilerplateDescription>::cast(NewFixedArrayWithMap(
472  RootIndex::kObjectBoilerplateDescriptionMap, size, TENURED));
473 
474  if (has_different_size_backing_store) {
475  DCHECK_IMPLIES((boilerplate == (all_properties - index_keys)),
476  has_seen_proto);
477  description->set_backing_store_size(isolate(), backing_store_size);
478  }
479 
480  description->set_flags(0);
481 
482  return description;
483 }
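
The size computed above packs one name/value pair per boilerplate property after a fixed header, with one extra entry when the backing-store size has to be stored explicitly. A worked version of that arithmetic as a standalone sketch (the header size of 1 passed below is an assumption for illustration, not V8's kDescriptionStartIndex):

  namespace demo {

  constexpr int DescriptionSizeDemo(int boilerplate, int all_properties,
                                    int index_keys, bool has_seen_proto,
                                    int description_start_index) {
    int backing_store_size =
        all_properties - index_keys - (has_seen_proto ? 1 : 0);
    int size = 2 * boilerplate + description_start_index;
    if (boilerplate != backing_store_size) size += 1;  // extra size entry
    return size;
  }

  // 3 boilerplate properties, 5 properties total, 1 indexed key, no __proto__:
  // the backing store holds 4 entries, so the extra size slot is needed.
  static_assert(DescriptionSizeDemo(3, 5, 1, false, 1) == 8, "");

  }  // namespace demo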
484 
485 Handle<FixedArrayBase> Factory::NewFixedDoubleArray(int length,
486  PretenureFlag pretenure) {
487  DCHECK_LE(0, length);
488  if (length == 0) return empty_fixed_array();
489  if (length > FixedDoubleArray::kMaxLength) {
490  isolate()->heap()->FatalProcessOutOfMemory("invalid array length");
491  }
492  int size = FixedDoubleArray::SizeFor(length);
493  Map map = *fixed_double_array_map();
494  HeapObject* result =
495  AllocateRawWithImmortalMap(size, pretenure, map, kDoubleAligned);
496  Handle<FixedDoubleArray> array(FixedDoubleArray::cast(result), isolate());
497  array->set_length(length);
498  return array;
499 }
500 
501 Handle<FixedArrayBase> Factory::NewFixedDoubleArrayWithHoles(
502  int length, PretenureFlag pretenure) {
503  DCHECK_LE(0, length);
504  Handle<FixedArrayBase> array = NewFixedDoubleArray(length, pretenure);
505  if (length > 0) {
506  Handle<FixedDoubleArray>::cast(array)->FillWithHoles(0, length);
507  }
508  return array;
509 }
510 
511 Handle<FeedbackMetadata> Factory::NewFeedbackMetadata(int slot_count,
512  PretenureFlag tenure) {
513  DCHECK_LE(0, slot_count);
514  int size = FeedbackMetadata::SizeFor(slot_count);
515  HeapObject* result =
516  AllocateRawWithImmortalMap(size, tenure, *feedback_metadata_map());
517  Handle<FeedbackMetadata> data(FeedbackMetadata::cast(result), isolate());
518  data->set_slot_count(slot_count);
519 
520  // Initialize the data section to 0.
521  int data_size = size - FeedbackMetadata::kHeaderSize;
522  Address data_start = data->address() + FeedbackMetadata::kHeaderSize;
523  memset(reinterpret_cast<byte*>(data_start), 0, data_size);
524  // Fields have been zeroed out but not initialized, so this object will not
525  // pass object verification at this point.
526  return data;
527 }
528 
529 Handle<FrameArray> Factory::NewFrameArray(int number_of_frames,
530  PretenureFlag pretenure) {
531  DCHECK_LE(0, number_of_frames);
532  Handle<FixedArray> result = NewFixedArrayWithHoles(
533  FrameArray::LengthFor(number_of_frames), pretenure);
534  result->set(FrameArray::kFrameCountIndex, Smi::kZero);
535  return Handle<FrameArray>::cast(result);
536 }
537 
538 template <typename T>
539 Handle<T> Factory::AllocateSmallOrderedHashTable(Handle<Map> map, int capacity,
540  PretenureFlag pretenure) {
541  DCHECK_LE(0, capacity);
542  CHECK_LE(capacity, T::kMaxCapacity);
543  DCHECK_EQ(0, capacity % T::kLoadFactor);
544 
545  int size = T::SizeFor(capacity);
546  HeapObject* result = AllocateRawWithImmortalMap(size, pretenure, *map);
547  Handle<T> table(T::cast(result), isolate());
548  table->Initialize(isolate(), capacity);
549  return table;
550 }
551 
552 Handle<SmallOrderedHashSet> Factory::NewSmallOrderedHashSet(
553  int capacity, PretenureFlag pretenure) {
554  return AllocateSmallOrderedHashTable<SmallOrderedHashSet>(
555  small_ordered_hash_set_map(), capacity, pretenure);
556 }
557 
558 Handle<SmallOrderedHashMap> Factory::NewSmallOrderedHashMap(
559  int capacity, PretenureFlag pretenure) {
560  return AllocateSmallOrderedHashTable<SmallOrderedHashMap>(
561  small_ordered_hash_map_map(), capacity, pretenure);
562 }
563 
564 Handle<SmallOrderedNameDictionary> Factory::NewSmallOrderedNameDictionary(
565  int capacity, PretenureFlag pretenure) {
566  return AllocateSmallOrderedHashTable<SmallOrderedNameDictionary>(
567  small_ordered_name_dictionary_map(), capacity, pretenure);
568 }
569 
570 Handle<OrderedHashSet> Factory::NewOrderedHashSet() {
571  return OrderedHashSet::Allocate(isolate(), OrderedHashSet::kMinCapacity);
572 }
573 
574 Handle<OrderedHashMap> Factory::NewOrderedHashMap() {
575  return OrderedHashMap::Allocate(isolate(), OrderedHashMap::kMinCapacity);
576 }
577 
578 Handle<OrderedNameDictionary> Factory::NewOrderedNameDictionary() {
579  return OrderedNameDictionary::Allocate(isolate(),
580  OrderedNameDictionary::kMinCapacity);
581 }
582 
583 Handle<AccessorPair> Factory::NewAccessorPair() {
584  Handle<AccessorPair> accessors =
585  Handle<AccessorPair>::cast(NewStruct(ACCESSOR_PAIR_TYPE, TENURED));
586  accessors->set_getter(*null_value(), SKIP_WRITE_BARRIER);
587  accessors->set_setter(*null_value(), SKIP_WRITE_BARRIER);
588  return accessors;
589 }
590 
591 // Internalized strings are created in the old generation (data space).
592 Handle<String> Factory::InternalizeUtf8String(Vector<const char> string) {
593  Utf8StringKey key(string, isolate()->heap()->HashSeed());
594  return InternalizeStringWithKey(&key);
595 }
596 
597 Handle<String> Factory::InternalizeOneByteString(Vector<const uint8_t> string) {
598  OneByteStringKey key(string, isolate()->heap()->HashSeed());
599  return InternalizeStringWithKey(&key);
600 }
601 
602 Handle<String> Factory::InternalizeOneByteString(
603  Handle<SeqOneByteString> string, int from, int length) {
604  SeqOneByteSubStringKey key(isolate(), string, from, length);
605  return InternalizeStringWithKey(&key);
606 }
607 
608 Handle<String> Factory::InternalizeTwoByteString(Vector<const uc16> string) {
609  TwoByteStringKey key(string, isolate()->heap()->HashSeed());
610  return InternalizeStringWithKey(&key);
611 }
612 
613 template <class StringTableKey>
614 Handle<String> Factory::InternalizeStringWithKey(StringTableKey* key) {
615  return StringTable::LookupKey(isolate(), key);
616 }
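
Internalization is string interning: every distinct character sequence is funneled through the string table so it is represented by a single canonical object, which lets later lookups compare identities instead of characters. A conceptual standalone analog using the standard library:

  #include <cassert>
  #include <string>
  #include <unordered_set>

  namespace demo {

  // Returns the canonical copy of |s|, inserting it on first sight.
  const std::string* Intern(std::unordered_set<std::string>* table,
                            const std::string& s) {
    return &*table->insert(s).first;
  }

  void Example() {
    std::unordered_set<std::string> table;
    const std::string* a = Intern(&table, "foo");
    const std::string* b = Intern(&table, std::string("f") + "oo");
    assert(a == b);  // same canonical object, pointer comparison suffices
  }

  }  // namespace demo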
617 
618 MaybeHandle<String> Factory::NewStringFromOneByte(Vector<const uint8_t> string,
619  PretenureFlag pretenure) {
620  DCHECK_NE(pretenure, TENURED_READ_ONLY);
621  int length = string.length();
622  if (length == 0) return empty_string();
623  if (length == 1) return LookupSingleCharacterStringFromCode(string[0]);
624  Handle<SeqOneByteString> result;
625  ASSIGN_RETURN_ON_EXCEPTION(isolate(), result,
626  NewRawOneByteString(string.length(), pretenure),
627  String);
628 
629  DisallowHeapAllocation no_gc;
630  // Copy the characters into the new object.
631  CopyChars(SeqOneByteString::cast(*result)->GetChars(), string.start(),
632  length);
633  return result;
634 }
635 
636 MaybeHandle<String> Factory::NewStringFromUtf8(Vector<const char> string,
637  PretenureFlag pretenure) {
638  DCHECK_NE(pretenure, TENURED_READ_ONLY);
639  // Check for ASCII first since this is the common case.
640  const char* ascii_data = string.start();
641  int length = string.length();
642  int non_ascii_start = String::NonAsciiStart(ascii_data, length);
643  if (non_ascii_start >= length) {
644  // If the string is ASCII, we do not need to convert the characters
645  // since UTF8 is backwards compatible with ASCII.
646  return NewStringFromOneByte(Vector<const uint8_t>::cast(string), pretenure);
647  }
648 
649  // Non-ASCII and we need to decode.
650  auto non_ascii = string.SubVector(non_ascii_start, length);
651  Access<UnicodeCache::Utf8Decoder> decoder(
652  isolate()->unicode_cache()->utf8_decoder());
653  decoder->Reset(non_ascii);
654 
655  int utf16_length = static_cast<int>(decoder->Utf16Length());
656  DCHECK_GT(utf16_length, 0);
657 
658  // Allocate string.
659  Handle<SeqTwoByteString> result;
660  ASSIGN_RETURN_ON_EXCEPTION(
661  isolate(), result,
662  NewRawTwoByteString(non_ascii_start + utf16_length, pretenure), String);
663 
664  // Copy ASCII portion.
665  DisallowHeapAllocation no_gc;
666  uint16_t* data = result->GetChars();
667  for (int i = 0; i < non_ascii_start; i++) {
668  *data++ = *ascii_data++;
669  }
670 
671  // Now write the remainder.
672  decoder->WriteUtf16(data, utf16_length, non_ascii);
673  return result;
674 }
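
The Utf8Decoder is only engaged from the first non-ASCII byte onward; the ASCII prefix is copied verbatim because UTF-8 is a superset of ASCII. A simplified standalone sketch of the prefix scan that String::NonAsciiStart performs (the real implementation is more optimized):

  #include <cstdint>

  namespace demo {

  // Index of the first byte that is not 7-bit ASCII, or |length| if none.
  int NonAsciiStartDemo(const char* data, int length) {
    for (int i = 0; i < length; i++) {
      if (static_cast<uint8_t>(data[i]) > 0x7F) return i;
    }
    return length;
  }

  }  // namespace demo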
675 
676 MaybeHandle<String> Factory::NewStringFromUtf8SubString(
677  Handle<SeqOneByteString> str, int begin, int length,
678  PretenureFlag pretenure) {
679  Access<UnicodeCache::Utf8Decoder> decoder(
680  isolate()->unicode_cache()->utf8_decoder());
681  int non_ascii_start;
682  int utf16_length = 0;
683  {
684  DisallowHeapAllocation no_gc;
685  const char* ascii_data =
686  reinterpret_cast<const char*>(str->GetChars() + begin);
687  non_ascii_start = String::NonAsciiStart(ascii_data, length);
688  if (non_ascii_start < length) {
689  // Non-ASCII and we need to decode.
690  auto non_ascii = Vector<const char>(ascii_data + non_ascii_start,
691  length - non_ascii_start);
692  decoder->Reset(non_ascii);
693 
694  utf16_length = static_cast<int>(decoder->Utf16Length());
695  }
696  }
697 
698  if (non_ascii_start >= length) {
699  // If the string is ASCII, we can just make a substring.
700  // TODO(v8): the pretenure flag is ignored in this case.
701  return NewSubString(str, begin, begin + length);
702  }
703 
704  DCHECK_GT(utf16_length, 0);
705 
706  // Allocate string.
707  Handle<SeqTwoByteString> result;
708  ASSIGN_RETURN_ON_EXCEPTION(
709  isolate(), result,
710  NewRawTwoByteString(non_ascii_start + utf16_length, pretenure), String);
711 
712  // Update pointer references, since the original string may have moved after
713  // allocation.
714  DisallowHeapAllocation no_gc;
715  const char* ascii_data =
716  reinterpret_cast<const char*>(str->GetChars() + begin);
717  auto non_ascii = Vector<const char>(ascii_data + non_ascii_start,
718  length - non_ascii_start);
719 
720  // Copy ASCII portion.
721  uint16_t* data = result->GetChars();
722  for (int i = 0; i < non_ascii_start; i++) {
723  *data++ = *ascii_data++;
724  }
725 
726  // Now write the remainder.
727  decoder->WriteUtf16(data, utf16_length, non_ascii);
728  return result;
729 }
730 
731 MaybeHandle<String> Factory::NewStringFromTwoByte(const uc16* string,
732  int length,
733  PretenureFlag pretenure) {
734  DCHECK_NE(pretenure, TENURED_READ_ONLY);
735  if (length == 0) return empty_string();
736  if (String::IsOneByte(string, length)) {
737  if (length == 1) return LookupSingleCharacterStringFromCode(string[0]);
738  Handle<SeqOneByteString> result;
739  ASSIGN_RETURN_ON_EXCEPTION(isolate(), result,
740  NewRawOneByteString(length, pretenure), String);
741  DisallowHeapAllocation no_gc;
742  CopyChars(result->GetChars(), string, length);
743  return result;
744  } else {
745  Handle<SeqTwoByteString> result;
746  ASSIGN_RETURN_ON_EXCEPTION(isolate(), result,
747  NewRawTwoByteString(length, pretenure), String);
748  DisallowHeapAllocation no_gc;
749  CopyChars(result->GetChars(), string, length);
750  return result;
751  }
752 }
753 
754 MaybeHandle<String> Factory::NewStringFromTwoByte(Vector<const uc16> string,
755  PretenureFlag pretenure) {
756  return NewStringFromTwoByte(string.start(), string.length(), pretenure);
757 }
758 
759 MaybeHandle<String> Factory::NewStringFromTwoByte(
760  const ZoneVector<uc16>* string, PretenureFlag pretenure) {
761  return NewStringFromTwoByte(string->data(), static_cast<int>(string->size()),
762  pretenure);
763 }
764 
765 namespace {
766 
767 bool inline IsOneByte(Vector<const char> str, int chars) {
768  // TODO(dcarney): incorporate Latin-1 check when Latin-1 is supported?
769  return chars == str.length();
770 }
771 
772 bool inline IsOneByte(Handle<String> str) {
773  return str->IsOneByteRepresentation();
774 }
775 
776 inline void WriteOneByteData(Vector<const char> vector, uint8_t* chars,
777  int len) {
778  // Only works for one byte strings.
779  DCHECK(vector.length() == len);
780  MemCopy(chars, vector.start(), len);
781 }
782 
783 inline void WriteTwoByteData(Vector<const char> vector, uint16_t* chars,
784  int len) {
785  unibrow::Utf8Iterator it = unibrow::Utf8Iterator(vector);
786  while (!it.Done()) {
787  DCHECK_GT(len, 0);
788  len -= 1;
789 
790  uint16_t c = *it;
791  ++it;
792  DCHECK_NE(unibrow::Utf8::kBadChar, c);
793  *chars++ = c;
794  }
795  DCHECK_EQ(len, 0);
796 }
797 
798 inline void WriteOneByteData(Handle<String> s, uint8_t* chars, int len) {
799  DCHECK(s->length() == len);
800  String::WriteToFlat(*s, chars, 0, len);
801 }
802 
803 inline void WriteTwoByteData(Handle<String> s, uint16_t* chars, int len) {
804  DCHECK(s->length() == len);
805  String::WriteToFlat(*s, chars, 0, len);
806 }
807 
808 } // namespace
809 
810 Handle<SeqOneByteString> Factory::AllocateRawOneByteInternalizedString(
811  int length, uint32_t hash_field) {
812  CHECK_GE(String::kMaxLength, length);
813  // The canonical empty_string is the only zero-length string we allow.
814  DCHECK_IMPLIES(length == 0,
815  isolate()->roots_table()[RootIndex::kempty_string] == nullptr);
816 
817  Map map = *one_byte_internalized_string_map();
818  int size = SeqOneByteString::SizeFor(length);
819  HeapObject* result = AllocateRawWithImmortalMap(
820  size,
821  isolate()->heap()->CanAllocateInReadOnlySpace() ? TENURED_READ_ONLY
822  : TENURED,
823  map);
824  Handle<SeqOneByteString> answer(SeqOneByteString::cast(result), isolate());
825  answer->set_length(length);
826  answer->set_hash_field(hash_field);
827  DCHECK_EQ(size, answer->Size());
828  return answer;
829 }
830 
831 Handle<String> Factory::AllocateTwoByteInternalizedString(
832  Vector<const uc16> str, uint32_t hash_field) {
833  CHECK_GE(String::kMaxLength, str.length());
834  DCHECK_NE(0, str.length()); // Use Heap::empty_string() instead.
835 
836  Map map = *internalized_string_map();
837  int size = SeqTwoByteString::SizeFor(str.length());
838  HeapObject* result = AllocateRawWithImmortalMap(size, TENURED, map);
839  Handle<SeqTwoByteString> answer(SeqTwoByteString::cast(result), isolate());
840  answer->set_length(str.length());
841  answer->set_hash_field(hash_field);
842  DCHECK_EQ(size, answer->Size());
843  DisallowHeapAllocation no_gc;
844 
845  // Fill in the characters.
846  MemCopy(answer->GetChars(), str.start(), str.length() * kUC16Size);
847 
848  return answer;
849 }
850 
851 template <bool is_one_byte, typename T>
852 Handle<String> Factory::AllocateInternalizedStringImpl(T t, int chars,
853  uint32_t hash_field) {
854  DCHECK_LE(0, chars);
855  DCHECK_GE(String::kMaxLength, chars);
856 
857  // Compute map and object size.
858  int size;
859  Map map;
860  if (is_one_byte) {
861  map = *one_byte_internalized_string_map();
862  size = SeqOneByteString::SizeFor(chars);
863  } else {
864  map = *internalized_string_map();
865  size = SeqTwoByteString::SizeFor(chars);
866  }
867 
868  HeapObject* result = AllocateRawWithImmortalMap(
869  size,
870  isolate()->heap()->CanAllocateInReadOnlySpace() ? TENURED_READ_ONLY
871  : TENURED,
872  map);
873  Handle<String> answer(String::cast(result), isolate());
874  answer->set_length(chars);
875  answer->set_hash_field(hash_field);
876  DCHECK_EQ(size, answer->Size());
877  DisallowHeapAllocation no_gc;
878 
879  if (is_one_byte) {
880  WriteOneByteData(t, SeqOneByteString::cast(*answer)->GetChars(), chars);
881  } else {
882  WriteTwoByteData(t, SeqTwoByteString::cast(*answer)->GetChars(), chars);
883  }
884  return answer;
885 }
886 
887 Handle<String> Factory::NewInternalizedStringFromUtf8(Vector<const char> str,
888  int chars,
889  uint32_t hash_field) {
890  if (IsOneByte(str, chars)) {
891  Handle<SeqOneByteString> result =
892  AllocateRawOneByteInternalizedString(str.length(), hash_field);
893  DisallowHeapAllocation no_allocation;
894  MemCopy(result->GetChars(), str.start(), str.length());
895  return result;
896  }
897  return AllocateInternalizedStringImpl<false>(str, chars, hash_field);
898 }
899 
900 Handle<String> Factory::NewOneByteInternalizedString(Vector<const uint8_t> str,
901  uint32_t hash_field) {
902  Handle<SeqOneByteString> result =
903  AllocateRawOneByteInternalizedString(str.length(), hash_field);
904  DisallowHeapAllocation no_allocation;
905  MemCopy(result->GetChars(), str.start(), str.length());
906  return result;
907 }
908 
909 Handle<String> Factory::NewOneByteInternalizedSubString(
910  Handle<SeqOneByteString> string, int offset, int length,
911  uint32_t hash_field) {
912  Handle<SeqOneByteString> result =
913  AllocateRawOneByteInternalizedString(length, hash_field);
914  DisallowHeapAllocation no_allocation;
915  MemCopy(result->GetChars(), string->GetChars() + offset, length);
916  return result;
917 }
918 
919 Handle<String> Factory::NewTwoByteInternalizedString(Vector<const uc16> str,
920  uint32_t hash_field) {
921  return AllocateTwoByteInternalizedString(str, hash_field);
922 }
923 
924 Handle<String> Factory::NewInternalizedStringImpl(Handle<String> string,
925  int chars,
926  uint32_t hash_field) {
927  if (IsOneByte(string)) {
928  return AllocateInternalizedStringImpl<true>(string, chars, hash_field);
929  }
930  return AllocateInternalizedStringImpl<false>(string, chars, hash_field);
931 }
932 
933 namespace {
934 
935 MaybeHandle<Map> GetInternalizedStringMap(Factory* f, Handle<String> string) {
936  switch (string->map()->instance_type()) {
937  case STRING_TYPE:
938  return f->internalized_string_map();
939  case ONE_BYTE_STRING_TYPE:
940  return f->one_byte_internalized_string_map();
941  case EXTERNAL_STRING_TYPE:
942  return f->external_internalized_string_map();
943  case EXTERNAL_ONE_BYTE_STRING_TYPE:
944  return f->external_one_byte_internalized_string_map();
945  case EXTERNAL_STRING_WITH_ONE_BYTE_DATA_TYPE:
946  return f->external_internalized_string_with_one_byte_data_map();
947  case UNCACHED_EXTERNAL_STRING_TYPE:
948  return f->uncached_external_internalized_string_map();
949  case UNCACHED_EXTERNAL_ONE_BYTE_STRING_TYPE:
950  return f->uncached_external_one_byte_internalized_string_map();
951  case UNCACHED_EXTERNAL_STRING_WITH_ONE_BYTE_DATA_TYPE:
952  return f->uncached_external_internalized_string_with_one_byte_data_map();
953  default:
954  return MaybeHandle<Map>(); // No match found.
955  }
956 }
957 
958 } // namespace
959 
960 MaybeHandle<Map> Factory::InternalizedStringMapForString(
961  Handle<String> string) {
962  // If the string is in new space it cannot be used as internalized.
963  if (Heap::InNewSpace(*string)) return MaybeHandle<Map>();
964 
965  return GetInternalizedStringMap(this, string);
966 }
967 
968 template <class StringClass>
969 Handle<StringClass> Factory::InternalizeExternalString(Handle<String> string) {
970  Handle<StringClass> cast_string = Handle<StringClass>::cast(string);
971  Handle<Map> map = GetInternalizedStringMap(this, string).ToHandleChecked();
972  Handle<StringClass> external_string(StringClass::cast(New(map, TENURED)),
973  isolate());
974  external_string->set_length(cast_string->length());
975  external_string->set_hash_field(cast_string->hash_field());
976  external_string->SetResource(isolate(), nullptr);
977  isolate()->heap()->RegisterExternalString(*external_string);
978  return external_string;
979 }
980 
981 template Handle<ExternalOneByteString>
982  Factory::InternalizeExternalString<ExternalOneByteString>(Handle<String>);
983 template Handle<ExternalTwoByteString>
984  Factory::InternalizeExternalString<ExternalTwoByteString>(Handle<String>);
985 
986 MaybeHandle<SeqOneByteString> Factory::NewRawOneByteString(
987  int length, PretenureFlag pretenure) {
988  if (length > String::kMaxLength || length < 0) {
989  THROW_NEW_ERROR(isolate(), NewInvalidStringLengthError(), SeqOneByteString);
990  }
991  DCHECK_GT(length, 0); // Use Factory::empty_string() instead.
992  int size = SeqOneByteString::SizeFor(length);
993  DCHECK_GE(SeqOneByteString::kMaxSize, size);
994 
995  HeapObject* result =
996  AllocateRawWithImmortalMap(size, pretenure, *one_byte_string_map());
997  Handle<SeqOneByteString> string(SeqOneByteString::cast(result), isolate());
998  string->set_length(length);
999  string->set_hash_field(String::kEmptyHashField);
1000  DCHECK_EQ(size, string->Size());
1001  return string;
1002 }
1003 
1004 MaybeHandle<SeqTwoByteString> Factory::NewRawTwoByteString(
1005  int length, PretenureFlag pretenure) {
1006  if (length > String::kMaxLength || length < 0) {
1007  THROW_NEW_ERROR(isolate(), NewInvalidStringLengthError(), SeqTwoByteString);
1008  }
1009  DCHECK_GT(length, 0); // Use Factory::empty_string() instead.
1010  int size = SeqTwoByteString::SizeFor(length);
1011  DCHECK_GE(SeqTwoByteString::kMaxSize, size);
1012 
1013  HeapObject* result =
1014  AllocateRawWithImmortalMap(size, pretenure, *string_map());
1015  Handle<SeqTwoByteString> string(SeqTwoByteString::cast(result), isolate());
1016  string->set_length(length);
1017  string->set_hash_field(String::kEmptyHashField);
1018  DCHECK_EQ(size, string->Size());
1019  return string;
1020 }
1021 
1022 Handle<String> Factory::LookupSingleCharacterStringFromCode(uint32_t code) {
1023  if (code <= String::kMaxOneByteCharCodeU) {
1024  {
1025  DisallowHeapAllocation no_allocation;
1026  Object* value = single_character_string_cache()->get(code);
1027  if (value != *undefined_value()) {
1028  return handle(String::cast(value), isolate());
1029  }
1030  }
1031  uint8_t buffer[1];
1032  buffer[0] = static_cast<uint8_t>(code);
1033  Handle<String> result =
1034  InternalizeOneByteString(Vector<const uint8_t>(buffer, 1));
1035  single_character_string_cache()->set(code, *result);
1036  return result;
1037  }
1038  DCHECK_LE(code, String::kMaxUtf16CodeUnitU);
1039 
1040  Handle<SeqTwoByteString> result = NewRawTwoByteString(1).ToHandleChecked();
1041  result->SeqTwoByteStringSet(0, static_cast<uint16_t>(code));
1042  return result;
1043 }
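
Character codes up to kMaxOneByteCharCode are memoized in single_character_string_cache(), so repeated lookups for the same code return the same internalized string. A conceptual standalone analog of that cache:

  #include <array>
  #include <cstdint>
  #include <string>

  namespace demo {

  std::array<std::string, 256> single_char_cache;  // one slot per one-byte code

  const std::string& SingleCharacterString(uint8_t code) {
    std::string& slot = single_char_cache[code];
    if (slot.empty()) slot.assign(1, static_cast<char>(code));  // fill lazily
    return slot;
  }

  }  // namespace demo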
1044 
1045 // Returns true for a character in a range. Both limits are inclusive.
1046 static inline bool Between(uint32_t character, uint32_t from, uint32_t to) {
1047  // This makes use of the unsigned wraparound.
1048  return character - from <= to - from;
1049 }
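
The single unsigned comparison replaces the usual pair of range checks: when character is below from, the subtraction wraps around to a very large unsigned value and the test fails. A standalone check of the trick:

  #include <cassert>
  #include <cstdint>

  namespace demo {

  bool BetweenDemo(uint32_t character, uint32_t from, uint32_t to) {
    return character - from <= to - from;
  }

  void Example() {
    assert(BetweenDemo('5', '0', '9'));
    assert(!BetweenDemo('a', '0', '9'));
    assert(!BetweenDemo('/', '0', '9'));  // '/' is one below '0'; wraps around
  }

  }  // namespace demo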
1050 
1051 static inline Handle<String> MakeOrFindTwoCharacterString(Isolate* isolate,
1052  uint16_t c1,
1053  uint16_t c2) {
1054  // Numeric strings have a different hash algorithm not known by
1055  // LookupTwoCharsStringIfExists, so we skip this step for such strings.
1056  if (!Between(c1, '0', '9') || !Between(c2, '0', '9')) {
1057  Handle<String> result;
1058  if (StringTable::LookupTwoCharsStringIfExists(isolate, c1, c2)
1059  .ToHandle(&result)) {
1060  return result;
1061  }
1062  }
1063 
1064  // Now we know the length is 2, we might as well make use of that fact
1065  // when building the new string.
1066  if (static_cast<unsigned>(c1 | c2) <= String::kMaxOneByteCharCodeU) {
1067  // We can do this.
1068  DCHECK(base::bits::IsPowerOfTwo(String::kMaxOneByteCharCodeU +
1069  1)); // because of this.
1070  Handle<SeqOneByteString> str =
1071  isolate->factory()->NewRawOneByteString(2).ToHandleChecked();
1072  DisallowHeapAllocation no_allocation;
1073  uint8_t* dest = str->GetChars();
1074  dest[0] = static_cast<uint8_t>(c1);
1075  dest[1] = static_cast<uint8_t>(c2);
1076  return str;
1077  } else {
1078  Handle<SeqTwoByteString> str =
1079  isolate->factory()->NewRawTwoByteString(2).ToHandleChecked();
1080  DisallowHeapAllocation no_allocation;
1081  uc16* dest = str->GetChars();
1082  dest[0] = c1;
1083  dest[1] = c2;
1084  return str;
1085  }
1086 }
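
The (c1 | c2) test above is why the DCHECK insists that kMaxOneByteCharCode + 1 be a power of two: the maximum one-byte code is then an all-ones bit mask, so both characters fit in one byte exactly when their bitwise OR does. A standalone illustration with 0xFF standing in for String::kMaxOneByteCharCodeU:

  #include <cassert>
  #include <cstdint>

  namespace demo {

  constexpr uint32_t kMaxOneByteDemo = 0xFF;  // all ones, i.e. 0x100 - 1

  bool BothFitInOneByte(uint16_t c1, uint16_t c2) {
    return (static_cast<uint32_t>(c1) | c2) <= kMaxOneByteDemo;
  }

  void Example() {
    assert(BothFitInOneByte(0x41, 0xFF));
    assert(!BothFitInOneByte(0x41, 0x100));  // 0x100 needs two bytes
  }

  }  // namespace demo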
1087 
1088 template <typename SinkChar, typename StringType>
1089 Handle<String> ConcatStringContent(Handle<StringType> result,
1090  Handle<String> first,
1091  Handle<String> second) {
1092  DisallowHeapAllocation pointer_stays_valid;
1093  SinkChar* sink = result->GetChars();
1094  String::WriteToFlat(*first, sink, 0, first->length());
1095  String::WriteToFlat(*second, sink + first->length(), 0, second->length());
1096  return result;
1097 }
1098 
1099 MaybeHandle<String> Factory::NewConsString(Handle<String> left,
1100  Handle<String> right) {
1101  if (left->IsThinString()) {
1102  left = handle(Handle<ThinString>::cast(left)->actual(), isolate());
1103  }
1104  if (right->IsThinString()) {
1105  right = handle(Handle<ThinString>::cast(right)->actual(), isolate());
1106  }
1107  int left_length = left->length();
1108  if (left_length == 0) return right;
1109  int right_length = right->length();
1110  if (right_length == 0) return left;
1111 
1112  int length = left_length + right_length;
1113 
1114  if (length == 2) {
1115  uint16_t c1 = left->Get(0);
1116  uint16_t c2 = right->Get(0);
1117  return MakeOrFindTwoCharacterString(isolate(), c1, c2);
1118  }
1119 
1120  // Make sure that an out of memory exception is thrown if the length
1121  // of the new cons string is too large.
1122  if (length > String::kMaxLength || length < 0) {
1123  THROW_NEW_ERROR(isolate(), NewInvalidStringLengthError(), String);
1124  }
1125 
1126  bool left_is_one_byte = left->IsOneByteRepresentation();
1127  bool right_is_one_byte = right->IsOneByteRepresentation();
1128  bool is_one_byte = left_is_one_byte && right_is_one_byte;
1129  bool is_one_byte_data_in_two_byte_string = false;
1130  if (!is_one_byte) {
1131  // At least one of the strings uses two-byte representation so we
1132  // can't use the fast case code for uncached one-byte strings below, but
1133  // we can try to save memory if all chars actually fit in one-byte.
1134  is_one_byte_data_in_two_byte_string =
1135  left->HasOnlyOneByteChars() && right->HasOnlyOneByteChars();
1136  if (is_one_byte_data_in_two_byte_string) {
1137  isolate()->counters()->string_add_runtime_ext_to_one_byte()->Increment();
1138  }
1139  }
1140 
1141  // If the resulting string is small make a flat string.
1142  if (length < ConsString::kMinLength) {
1143  // Note that neither of the two inputs can be a slice because:
1144  STATIC_ASSERT(ConsString::kMinLength <= SlicedString::kMinLength);
1145  DCHECK(left->IsFlat());
1146  DCHECK(right->IsFlat());
1147 
1148  STATIC_ASSERT(ConsString::kMinLength <= String::kMaxLength);
1149  if (is_one_byte) {
1150  Handle<SeqOneByteString> result =
1151  NewRawOneByteString(length).ToHandleChecked();
1152  DisallowHeapAllocation no_gc;
1153  uint8_t* dest = result->GetChars();
1154  // Copy left part.
1155  const uint8_t* src =
1156  left->IsExternalString()
1157  ? Handle<ExternalOneByteString>::cast(left)->GetChars()
1158  : Handle<SeqOneByteString>::cast(left)->GetChars();
1159  for (int i = 0; i < left_length; i++) *dest++ = src[i];
1160  // Copy right part.
1161  src = right->IsExternalString()
1162  ? Handle<ExternalOneByteString>::cast(right)->GetChars()
1163  : Handle<SeqOneByteString>::cast(right)->GetChars();
1164  for (int i = 0; i < right_length; i++) *dest++ = src[i];
1165  return result;
1166  }
1167 
1168  return (is_one_byte_data_in_two_byte_string)
1169  ? ConcatStringContent<uint8_t>(
1170  NewRawOneByteString(length).ToHandleChecked(), left, right)
1171  : ConcatStringContent<uc16>(
1172  NewRawTwoByteString(length).ToHandleChecked(), left,
1173  right);
1174  }
1175 
1176  bool one_byte = (is_one_byte || is_one_byte_data_in_two_byte_string);
1177  return NewConsString(left, right, length, one_byte);
1178 }
1179 
1180 Handle<String> Factory::NewConsString(Handle<String> left, Handle<String> right,
1181  int length, bool one_byte) {
1182  DCHECK(!left->IsThinString());
1183  DCHECK(!right->IsThinString());
1184  DCHECK_GE(length, ConsString::kMinLength);
1185  DCHECK_LE(length, String::kMaxLength);
1186 
1187  Handle<ConsString> result(
1188  ConsString::cast(one_byte ? New(cons_one_byte_string_map(), NOT_TENURED)
1189  : New(cons_string_map(), NOT_TENURED)),
1190  isolate());
1191 
1192  DisallowHeapAllocation no_gc;
1193  WriteBarrierMode mode = result->GetWriteBarrierMode(no_gc);
1194 
1195  result->set_hash_field(String::kEmptyHashField);
1196  result->set_length(length);
1197  result->set_first(isolate(), *left, mode);
1198  result->set_second(isolate(), *right, mode);
1199  return result;
1200 }
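
A cons string is effectively a rope node: results shorter than ConsString::kMinLength are copied into a flat sequential string right away (the branch in the two-argument NewConsString above), while longer results only record references to the two halves and defer copying until the string is flattened. A conceptual standalone analog of the node, not V8's object layout:

  #include <cstddef>
  #include <memory>
  #include <string>

  namespace demo {

  // Concatenation in O(1): no characters are touched, only referenced.
  struct ConsNode {
    std::size_t length;
    std::shared_ptr<const std::string> first;
    std::shared_ptr<const std::string> second;
  };

  ConsNode Concat(std::shared_ptr<const std::string> a,
                  std::shared_ptr<const std::string> b) {
    return ConsNode{a->size() + b->size(), std::move(a), std::move(b)};
  }

  }  // namespace demo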
1201 
1202 Handle<String> Factory::NewSurrogatePairString(uint16_t lead, uint16_t trail) {
1203  DCHECK_GE(lead, 0xD800);
1204  DCHECK_LE(lead, 0xDBFF);
1205  DCHECK_GE(trail, 0xDC00);
1206  DCHECK_LE(trail, 0xDFFF);
1207 
1208  Handle<SeqTwoByteString> str =
1209  isolate()->factory()->NewRawTwoByteString(2).ToHandleChecked();
1210  DisallowHeapAllocation no_allocation;
1211  uc16* dest = str->GetChars();
1212  dest[0] = lead;
1213  dest[1] = trail;
1214  return str;
1215 }
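
The lead/trail pair stored above is the UTF-16 encoding of a single supplementary code point, which the two 16-bit units encode as 0x10000 + ((lead - 0xD800) << 10) + (trail - 0xDC00). A standalone check of that arithmetic:

  #include <cstdint>

  namespace demo {

  constexpr uint32_t CodePointFromSurrogates(uint16_t lead, uint16_t trail) {
    return 0x10000u + ((static_cast<uint32_t>(lead) - 0xD800u) << 10) +
           (static_cast<uint32_t>(trail) - 0xDC00u);
  }

  // U+1F600 is encoded as the surrogate pair 0xD83D, 0xDE00.
  static_assert(CodePointFromSurrogates(0xD83D, 0xDE00) == 0x1F600u, "");

  }  // namespace demo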
1216 
1217 Handle<String> Factory::NewProperSubString(Handle<String> str, int begin,
1218  int end) {
1219 #if VERIFY_HEAP
1220  if (FLAG_verify_heap) str->StringVerify(isolate());
1221 #endif
1222  DCHECK(begin > 0 || end < str->length());
1223 
1224  str = String::Flatten(isolate(), str);
1225 
1226  int length = end - begin;
1227  if (length <= 0) return empty_string();
1228  if (length == 1) {
1229  return LookupSingleCharacterStringFromCode(str->Get(begin));
1230  }
1231  if (length == 2) {
1232  // Optimization for 2-byte strings often used as keys in a decompression
1233  // dictionary. Check whether we already have the string in the string
1234  // table to prevent creation of many unnecessary strings.
1235  uint16_t c1 = str->Get(begin);
1236  uint16_t c2 = str->Get(begin + 1);
1237  return MakeOrFindTwoCharacterString(isolate(), c1, c2);
1238  }
1239 
1240  if (!FLAG_string_slices || length < SlicedString::kMinLength) {
1241  if (str->IsOneByteRepresentation()) {
1242  Handle<SeqOneByteString> result =
1243  NewRawOneByteString(length).ToHandleChecked();
1244  DisallowHeapAllocation no_gc;
1245  uint8_t* dest = result->GetChars();
1246  String::WriteToFlat(*str, dest, begin, end);
1247  return result;
1248  } else {
1249  Handle<SeqTwoByteString> result =
1250  NewRawTwoByteString(length).ToHandleChecked();
1251  DisallowHeapAllocation no_gc;
1252  uc16* dest = result->GetChars();
1253  String::WriteToFlat(*str, dest, begin, end);
1254  return result;
1255  }
1256  }
1257 
1258  int offset = begin;
1259 
1260  if (str->IsSlicedString()) {
1261  Handle<SlicedString> slice = Handle<SlicedString>::cast(str);
1262  str = Handle<String>(slice->parent(), isolate());
1263  offset += slice->offset();
1264  }
1265  if (str->IsThinString()) {
1266  Handle<ThinString> thin = Handle<ThinString>::cast(str);
1267  str = handle(thin->actual(), isolate());
1268  }
1269 
1270  DCHECK(str->IsSeqString() || str->IsExternalString());
1271  Handle<Map> map = str->IsOneByteRepresentation()
1272  ? sliced_one_byte_string_map()
1273  : sliced_string_map();
1274  Handle<SlicedString> slice(SlicedString::cast(New(map, NOT_TENURED)),
1275  isolate());
1276 
1277  slice->set_hash_field(String::kEmptyHashField);
1278  slice->set_length(length);
1279  slice->set_parent(isolate(), *str);
1280  slice->set_offset(offset);
1281  return slice;
1282 }
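
Once the substring is at least SlicedString::kMinLength characters (and string slices are enabled), no characters are copied at all: the result is a SlicedString recording its parent, an offset and a length, and the unwrapping above guarantees the parent is always a flat sequential or external string. A conceptual standalone analog using std::string_view:

  #include <cstddef>
  #include <string>
  #include <string_view>

  namespace demo {

  // A "slice": a view over the parent's storage, no copy of the characters.
  std::string_view Slice(const std::string& parent, std::size_t offset,
                         std::size_t length) {
    return std::string_view(parent).substr(offset, length);
  }

  }  // namespace demo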
1283 
1284 MaybeHandle<String> Factory::NewExternalStringFromOneByte(
1285  const ExternalOneByteString::Resource* resource) {
1286  size_t length = resource->length();
1287  if (length > static_cast<size_t>(String::kMaxLength)) {
1288  THROW_NEW_ERROR(isolate(), NewInvalidStringLengthError(), String);
1289  }
1290  if (length == 0) return empty_string();
1291 
1292  Handle<Map> map;
1293  if (!resource->IsCacheable()) {
1294  map = uncached_external_one_byte_string_map();
1295  } else {
1296  map = external_one_byte_string_map();
1297  }
1298  Handle<ExternalOneByteString> external_string(
1299  ExternalOneByteString::cast(New(map, TENURED)), isolate());
1300  external_string->set_length(static_cast<int>(length));
1301  external_string->set_hash_field(String::kEmptyHashField);
1302  external_string->SetResource(isolate(), resource);
1303  isolate()->heap()->RegisterExternalString(*external_string);
1304 
1305  return external_string;
1306 }
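
An external string keeps its characters outside the V8 heap: the heap object stores only the length, the hash field and a pointer to an embedder-owned resource, and it is registered with the heap (RegisterExternalString above) so it can be tracked for later finalization. A conceptual sketch of such a resource, loosely modeled on the public v8::String::ExternalOneByteStringResource interface; the class below is illustrative, not V8's:

  #include <cstddef>

  namespace demo {

  // The embedder guarantees that |data| stays alive and unchanged for as long
  // as any string backed by this resource is reachable.
  class StaticOneByteResource {
   public:
    StaticOneByteResource(const char* data, std::size_t length)
        : data_(data), length_(length) {}
    const char* data() const { return data_; }
    std::size_t length() const { return length_; }

   private:
    const char* data_;
    std::size_t length_;
  };

  }  // namespace demo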
1307 
1308 MaybeHandle<String> Factory::NewExternalStringFromTwoByte(
1309  const ExternalTwoByteString::Resource* resource) {
1310  size_t length = resource->length();
1311  if (length > static_cast<size_t>(String::kMaxLength)) {
1312  THROW_NEW_ERROR(isolate(), NewInvalidStringLengthError(), String);
1313  }
1314  if (length == 0) return empty_string();
1315 
1316  // For small strings we check whether the resource contains only
1317  // one-byte characters. If yes, we use a different string map.
1318  static const size_t kOneByteCheckLengthLimit = 32;
1319  bool is_one_byte =
1320  length <= kOneByteCheckLengthLimit &&
1321  String::IsOneByte(resource->data(), static_cast<int>(length));
1322  Handle<Map> map;
1323  if (!resource->IsCacheable()) {
1324  map = is_one_byte ? uncached_external_string_with_one_byte_data_map()
1325  : uncached_external_string_map();
1326  } else {
1327  map = is_one_byte ? external_string_with_one_byte_data_map()
1328  : external_string_map();
1329  }
1330  Handle<ExternalTwoByteString> external_string(
1331  ExternalTwoByteString::cast(New(map, TENURED)), isolate());
1332  external_string->set_length(static_cast<int>(length));
1333  external_string->set_hash_field(String::kEmptyHashField);
1334  external_string->SetResource(isolate(), resource);
1335  isolate()->heap()->RegisterExternalString(*external_string);
1336 
1337  return external_string;
1338 }
1339 
1340 Handle<ExternalOneByteString> Factory::NewNativeSourceString(
1341  const ExternalOneByteString::Resource* resource) {
1342  size_t length = resource->length();
1343  DCHECK_LE(length, static_cast<size_t>(String::kMaxLength));
1344 
1345  Handle<Map> map = native_source_string_map();
1346  Handle<ExternalOneByteString> external_string(
1347  ExternalOneByteString::cast(New(map, TENURED)), isolate());
1348  external_string->set_length(static_cast<int>(length));
1349  external_string->set_hash_field(String::kEmptyHashField);
1350  external_string->SetResource(isolate(), resource);
1351  isolate()->heap()->RegisterExternalString(*external_string);
1352 
1353  return external_string;
1354 }
1355 
1356 Handle<JSStringIterator> Factory::NewJSStringIterator(Handle<String> string) {
1357  Handle<Map> map(isolate()->native_context()->initial_string_iterator_map(),
1358  isolate());
1359  Handle<String> flat_string = String::Flatten(isolate(), string);
1360  Handle<JSStringIterator> iterator =
1361  Handle<JSStringIterator>::cast(NewJSObjectFromMap(map));
1362  iterator->set_string(*flat_string);
1363  iterator->set_index(0);
1364 
1365  return iterator;
1366 }
1367 
1368 Handle<Symbol> Factory::NewSymbol(PretenureFlag flag) {
1369  DCHECK(flag != NOT_TENURED);
1370  // Statically ensure that it is safe to allocate symbols in paged spaces.
1371  STATIC_ASSERT(Symbol::kSize <= kMaxRegularHeapObjectSize);
1372 
1373  HeapObject* result =
1374  AllocateRawWithImmortalMap(Symbol::kSize, flag, *symbol_map());
1375 
1376  // Generate a random hash value.
1377  int hash = isolate()->GenerateIdentityHash(Name::kHashBitMask);
1378 
1379  Handle<Symbol> symbol(Symbol::cast(result), isolate());
1380  symbol->set_hash_field(Name::kIsNotArrayIndexMask |
1381  (hash << Name::kHashShift));
1382  symbol->set_name(*undefined_value());
1383  symbol->set_flags(0);
1384  DCHECK(!symbol->is_private());
1385  return symbol;
1386 }
1387 
1388 Handle<Symbol> Factory::NewPrivateSymbol(PretenureFlag flag) {
1389  DCHECK(flag != NOT_TENURED);
1390  Handle<Symbol> symbol = NewSymbol(flag);
1391  symbol->set_is_private(true);
1392  return symbol;
1393 }
1394 
1395 Handle<Symbol> Factory::NewPrivateNameSymbol() {
1396  Handle<Symbol> symbol = NewSymbol();
1397  symbol->set_is_private_name();
1398  return symbol;
1399 }
1400 
1401 Handle<Context> Factory::NewContext(RootIndex map_root_index, int size,
1402  int variadic_part_length,
1403  PretenureFlag pretenure) {
1404  DCHECK(RootsTable::IsImmortalImmovable(map_root_index));
1405  DCHECK_LE(Context::kTodoHeaderSize, size);
1406  DCHECK(IsAligned(size, kTaggedSize));
1407  DCHECK_LE(Context::MIN_CONTEXT_SLOTS, variadic_part_length);
1408  DCHECK_LE(Context::SizeFor(variadic_part_length), size);
1409 
1410  Map map = Map::cast(isolate()->root(map_root_index));
1411  HeapObject* result = AllocateRawWithImmortalMap(size, pretenure, map);
1412  Handle<Context> context(Context::cast(result), isolate());
1413  context->set_length(variadic_part_length);
1414  DCHECK_EQ(context->SizeFromMap(map), size);
1415  if (size > Context::kTodoHeaderSize) {
1416  ObjectSlot start = context->RawField(Context::kTodoHeaderSize);
1417  ObjectSlot end = context->RawField(size);
1418  size_t slot_count = end - start;
1419  MemsetPointer(start, *undefined_value(), slot_count);
1420  }
1421  return context;
1422 }
1423 
1424 Handle<NativeContext> Factory::NewNativeContext() {
1425  Handle<NativeContext> context = Handle<NativeContext>::cast(
1426  NewContext(RootIndex::kNativeContextMap, NativeContext::kSize,
1427  NativeContext::NATIVE_CONTEXT_SLOTS, TENURED));
1428  context->set_scope_info(ReadOnlyRoots(isolate()).empty_scope_info());
1429  context->set_previous(Context::unchecked_cast(Smi::zero()));
1430  context->set_extension(*the_hole_value());
1431  context->set_native_context(*context);
1432  context->set_errors_thrown(Smi::zero());
1433  context->set_math_random_index(Smi::zero());
1434  context->set_serialized_objects(*empty_fixed_array());
1435  context->set_microtask_queue(nullptr);
1436  return context;
1437 }
1438 
1439 Handle<Context> Factory::NewScriptContext(Handle<NativeContext> outer,
1440  Handle<ScopeInfo> scope_info) {
1441  DCHECK_EQ(scope_info->scope_type(), SCRIPT_SCOPE);
1442  int variadic_part_length = scope_info->ContextLength();
1443  Handle<Context> context = NewContext(RootIndex::kScriptContextMap,
1444  Context::SizeFor(variadic_part_length),
1445  variadic_part_length, TENURED);
1446  context->set_scope_info(*scope_info);
1447  context->set_previous(*outer);
1448  context->set_extension(*the_hole_value());
1449  context->set_native_context(*outer);
1450  DCHECK(context->IsScriptContext());
1451  return context;
1452 }
1453 
1454 Handle<ScriptContextTable> Factory::NewScriptContextTable() {
1455  Handle<ScriptContextTable> context_table =
1456  NewFixedArrayWithMap<ScriptContextTable>(
1457  RootIndex::kScriptContextTableMap, ScriptContextTable::kMinLength);
1458  context_table->set_used(0);
1459  return context_table;
1460 }
1461 
1462 Handle<Context> Factory::NewModuleContext(Handle<Module> module,
1463  Handle<NativeContext> outer,
1464  Handle<ScopeInfo> scope_info) {
1465  DCHECK_EQ(scope_info->scope_type(), MODULE_SCOPE);
1466  int variadic_part_length = scope_info->ContextLength();
1467  Handle<Context> context = NewContext(RootIndex::kModuleContextMap,
1468  Context::SizeFor(variadic_part_length),
1469  variadic_part_length, TENURED);
1470  context->set_scope_info(*scope_info);
1471  context->set_previous(*outer);
1472  context->set_extension(*module);
1473  context->set_native_context(*outer);
1474  DCHECK(context->IsModuleContext());
1475  return context;
1476 }
1477 
1478 Handle<Context> Factory::NewFunctionContext(Handle<Context> outer,
1479  Handle<ScopeInfo> scope_info) {
1480  RootIndex mapRootIndex;
1481  switch (scope_info->scope_type()) {
1482  case EVAL_SCOPE:
1483  mapRootIndex = RootIndex::kEvalContextMap;
1484  break;
1485  case FUNCTION_SCOPE:
1486  mapRootIndex = RootIndex::kFunctionContextMap;
1487  break;
1488  default:
1489  UNREACHABLE();
1490  }
1491  int variadic_part_length = scope_info->ContextLength();
1492  Handle<Context> context =
1493  NewContext(mapRootIndex, Context::SizeFor(variadic_part_length),
1494  variadic_part_length, NOT_TENURED);
1495  context->set_scope_info(*scope_info);
1496  context->set_previous(*outer);
1497  context->set_extension(*the_hole_value());
1498  context->set_native_context(outer->native_context());
1499  return context;
1500 }
1501 
1502 Handle<Context> Factory::NewCatchContext(Handle<Context> previous,
1503  Handle<ScopeInfo> scope_info,
1504  Handle<Object> thrown_object) {
1505  DCHECK_EQ(scope_info->scope_type(), CATCH_SCOPE);
1506  STATIC_ASSERT(Context::MIN_CONTEXT_SLOTS == Context::THROWN_OBJECT_INDEX);
1507  // TODO(ishell): Take the details from CatchContext class.
1508  int variadic_part_length = Context::MIN_CONTEXT_SLOTS + 1;
1509  Handle<Context> context = NewContext(RootIndex::kCatchContextMap,
1510  Context::SizeFor(variadic_part_length),
1511  variadic_part_length, NOT_TENURED);
1512  context->set_scope_info(*scope_info);
1513  context->set_previous(*previous);
1514  context->set_extension(*the_hole_value());
1515  context->set_native_context(previous->native_context());
1516  context->set(Context::THROWN_OBJECT_INDEX, *thrown_object);
1517  return context;
1518 }
1519 
1520 Handle<Context> Factory::NewDebugEvaluateContext(Handle<Context> previous,
1521  Handle<ScopeInfo> scope_info,
1522  Handle<JSReceiver> extension,
1523  Handle<Context> wrapped,
1524  Handle<StringSet> whitelist) {
1525  STATIC_ASSERT(Context::WHITE_LIST_INDEX == Context::MIN_CONTEXT_SLOTS + 1);
1526  DCHECK(scope_info->IsDebugEvaluateScope());
1527  Handle<HeapObject> ext = extension.is_null()
1528  ? Handle<HeapObject>::cast(the_hole_value())
1529  : Handle<HeapObject>::cast(extension);
1530  // TODO(ishell): Take the details from the DebugEvaluateContext class.
1531  int variadic_part_length = Context::MIN_CONTEXT_SLOTS + 2;
1532  Handle<Context> c = NewContext(RootIndex::kDebugEvaluateContextMap,
1533  Context::SizeFor(variadic_part_length),
1534  variadic_part_length, NOT_TENURED);
1535  c->set_scope_info(*scope_info);
1536  c->set_previous(*previous);
1537  c->set_native_context(previous->native_context());
1538  c->set_extension(*ext);
1539  if (!wrapped.is_null()) c->set(Context::WRAPPED_CONTEXT_INDEX, *wrapped);
1540  if (!whitelist.is_null()) c->set(Context::WHITE_LIST_INDEX, *whitelist);
1541  return c;
1542 }
1543 
1544 Handle<Context> Factory::NewWithContext(Handle<Context> previous,
1545  Handle<ScopeInfo> scope_info,
1546  Handle<JSReceiver> extension) {
1547  DCHECK_EQ(scope_info->scope_type(), WITH_SCOPE);
1548  // TODO(ishell): Take the details from WithContext class.
1549  int variadic_part_length = Context::MIN_CONTEXT_SLOTS;
1550  Handle<Context> context = NewContext(RootIndex::kWithContextMap,
1551  Context::SizeFor(variadic_part_length),
1552  variadic_part_length, NOT_TENURED);
1553  context->set_scope_info(*scope_info);
1554  context->set_previous(*previous);
1555  context->set_extension(*extension);
1556  context->set_native_context(previous->native_context());
1557  return context;
1558 }
1559 
1560 Handle<Context> Factory::NewBlockContext(Handle<Context> previous,
1561  Handle<ScopeInfo> scope_info) {
1562  DCHECK_EQ(scope_info->scope_type(), BLOCK_SCOPE);
1563  int variadic_part_length = scope_info->ContextLength();
1564  Handle<Context> context = NewContext(RootIndex::kBlockContextMap,
1565  Context::SizeFor(variadic_part_length),
1566  variadic_part_length, NOT_TENURED);
1567  context->set_scope_info(*scope_info);
1568  context->set_previous(*previous);
1569  context->set_extension(*the_hole_value());
1570  context->set_native_context(previous->native_context());
1571  return context;
1572 }
1573 
1574 Handle<Context> Factory::NewBuiltinContext(Handle<NativeContext> native_context,
1575  int variadic_part_length) {
1576  DCHECK_LE(Context::MIN_CONTEXT_SLOTS, variadic_part_length);
1577  Handle<Context> context = NewContext(RootIndex::kFunctionContextMap,
1578  Context::SizeFor(variadic_part_length),
1579  variadic_part_length, NOT_TENURED);
1580  context->set_scope_info(ReadOnlyRoots(isolate()).empty_scope_info());
1581  context->set_previous(*native_context);
1582  context->set_extension(*the_hole_value());
1583  context->set_native_context(*native_context);
1584  return context;
1585 }
1586 
1587 Handle<Struct> Factory::NewStruct(InstanceType type, PretenureFlag pretenure) {
1588  Map map;
1589  switch (type) {
1590 #define MAKE_CASE(TYPE, Name, name) \
1591  case TYPE: \
1592  map = *name##_map(); \
1593  break;
1594  STRUCT_LIST(MAKE_CASE)
1595 #undef MAKE_CASE
1596  default:
1597  UNREACHABLE();
1598  }
1599  int size = map->instance_size();
1600  HeapObject* result = AllocateRawWithImmortalMap(size, pretenure, map);
1601  Handle<Struct> str(Struct::cast(result), isolate());
1602  str->InitializeBody(size);
1603  return str;
1604 }
1605 
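// --- Editorial annotation (not part of factory.cc) --------------------------
// For illustration, one STRUCT_LIST(MAKE_CASE) expansion in the switch above
// looks roughly like this (shown for the Script entry, assuming Script is part
// of STRUCT_LIST in this version, which NewScriptWithId below relies on):
//
//   case SCRIPT_TYPE:
//     map = *script_map();
//     break;
//
// --- End editorial annotation ------------------------------------------------
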
1606 Handle<AliasedArgumentsEntry> Factory::NewAliasedArgumentsEntry(
1607  int aliased_context_slot) {
1608  Handle<AliasedArgumentsEntry> entry = Handle<AliasedArgumentsEntry>::cast(
1609  NewStruct(ALIASED_ARGUMENTS_ENTRY_TYPE, NOT_TENURED));
1610  entry->set_aliased_context_slot(aliased_context_slot);
1611  return entry;
1612 }
1613 
1614 Handle<AccessorInfo> Factory::NewAccessorInfo() {
1615  Handle<AccessorInfo> info =
1616  Handle<AccessorInfo>::cast(NewStruct(ACCESSOR_INFO_TYPE, TENURED));
1617  info->set_name(*empty_string());
1618  info->set_flags(0); // Must clear the flags; the field was initialized as undefined.
1619  info->set_is_sloppy(true);
1620  info->set_initial_property_attributes(NONE);
1621  return info;
1622 }
1623 
1624 Handle<Script> Factory::NewScript(Handle<String> source, PretenureFlag tenure) {
1625  return NewScriptWithId(source, isolate()->heap()->NextScriptId(), tenure);
1626 }
1627 
1628 Handle<Script> Factory::NewScriptWithId(Handle<String> source, int script_id,
1629  PretenureFlag tenure) {
1630  DCHECK(tenure == TENURED || tenure == TENURED_READ_ONLY);
1631  // Create and initialize script object.
1632  Heap* heap = isolate()->heap();
1633  ReadOnlyRoots roots(heap);
1634  Handle<Script> script = Handle<Script>::cast(NewStruct(SCRIPT_TYPE, tenure));
1635  script->set_source(*source);
1636  script->set_name(roots.undefined_value());
1637  script->set_id(script_id);
1638  script->set_line_offset(0);
1639  script->set_column_offset(0);
1640  script->set_context_data(roots.undefined_value());
1641  script->set_type(Script::TYPE_NORMAL);
1642  script->set_line_ends(roots.undefined_value());
1643  script->set_eval_from_shared_or_wrapped_arguments(roots.undefined_value());
1644  script->set_eval_from_position(0);
1645  script->set_shared_function_infos(*empty_weak_fixed_array(),
1646  SKIP_WRITE_BARRIER);
1647  script->set_flags(0);
1648  script->set_host_defined_options(*empty_fixed_array());
1649  Handle<WeakArrayList> scripts = script_list();
1650  scripts = WeakArrayList::AddToEnd(isolate(), scripts,
1651  MaybeObjectHandle::Weak(script));
1652  heap->set_script_list(*scripts);
1653  LOG(isolate(), ScriptEvent(Logger::ScriptEventType::kCreate, script_id));
1654  return script;
1655 }
1656 
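// --- Editorial annotation (not part of factory.cc) --------------------------
// Hedged usage sketch: creating a script also registers it in the heap's weak
// script_list as a side effect, so callers only keep the returned handle.
// `ScriptUsageSketch` is an illustrative name, not a V8 API.
static void ScriptUsageSketch(Isolate* isolate, Handle<String> source) {
  Handle<Script> script = isolate->factory()->NewScript(source, TENURED);
  DCHECK_EQ(Script::TYPE_NORMAL, script->type());
  USE(script);
}
// --- End editorial annotation ------------------------------------------------
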
1657 Handle<Script> Factory::CloneScript(Handle<Script> script) {
1658  Heap* heap = isolate()->heap();
1659  int script_id = isolate()->heap()->NextScriptId();
1660  Handle<Script> new_script =
1661  Handle<Script>::cast(NewStruct(SCRIPT_TYPE, TENURED));
1662  new_script->set_source(script->source());
1663  new_script->set_name(script->name());
1664  new_script->set_id(script_id);
1665  new_script->set_line_offset(script->line_offset());
1666  new_script->set_column_offset(script->column_offset());
1667  new_script->set_context_data(script->context_data());
1668  new_script->set_type(script->type());
1669  new_script->set_line_ends(ReadOnlyRoots(heap).undefined_value());
1670  new_script->set_eval_from_shared_or_wrapped_arguments(
1671  script->eval_from_shared_or_wrapped_arguments());
1672  new_script->set_shared_function_infos(*empty_weak_fixed_array(),
1673  SKIP_WRITE_BARRIER);
1674  new_script->set_eval_from_position(script->eval_from_position());
1675  new_script->set_flags(script->flags());
1676  new_script->set_host_defined_options(script->host_defined_options());
1677  Handle<WeakArrayList> scripts = script_list();
1678  scripts = WeakArrayList::AddToEnd(isolate(), scripts,
1679  MaybeObjectHandle::Weak(new_script));
1680  heap->set_script_list(*scripts);
1681  LOG(isolate(), ScriptEvent(Logger::ScriptEventType::kCreate, script_id));
1682  return new_script;
1683 }
1684 
1685 Handle<CallableTask> Factory::NewCallableTask(Handle<JSReceiver> callable,
1686  Handle<Context> context) {
1687  DCHECK(callable->IsCallable());
1688  Handle<CallableTask> microtask =
1689  Handle<CallableTask>::cast(NewStruct(CALLABLE_TASK_TYPE));
1690  microtask->set_callable(*callable);
1691  microtask->set_context(*context);
1692  return microtask;
1693 }
1694 
1695 Handle<CallbackTask> Factory::NewCallbackTask(Handle<Foreign> callback,
1696  Handle<Foreign> data) {
1697  Handle<CallbackTask> microtask =
1698  Handle<CallbackTask>::cast(NewStruct(CALLBACK_TASK_TYPE));
1699  microtask->set_callback(*callback);
1700  microtask->set_data(*data);
1701  return microtask;
1702 }
1703 
1704 Handle<PromiseResolveThenableJobTask> Factory::NewPromiseResolveThenableJobTask(
1705  Handle<JSPromise> promise_to_resolve, Handle<JSReceiver> then,
1706  Handle<JSReceiver> thenable, Handle<Context> context) {
1707  DCHECK(then->IsCallable());
1708  Handle<PromiseResolveThenableJobTask> microtask =
1709  Handle<PromiseResolveThenableJobTask>::cast(
1710  NewStruct(PROMISE_RESOLVE_THENABLE_JOB_TASK_TYPE));
1711  microtask->set_promise_to_resolve(*promise_to_resolve);
1712  microtask->set_then(*then);
1713  microtask->set_thenable(*thenable);
1714  microtask->set_context(*context);
1715  return microtask;
1716 }
1717 
1718 Handle<WeakFactoryCleanupJobTask> Factory::NewWeakFactoryCleanupJobTask(
1719  Handle<JSWeakFactory> weak_factory) {
1720  Handle<WeakFactoryCleanupJobTask> microtask =
1721  Handle<WeakFactoryCleanupJobTask>::cast(
1722  NewStruct(WEAK_FACTORY_CLEANUP_JOB_TASK_TYPE));
1723  microtask->set_factory(*weak_factory);
1724  return microtask;
1725 }
1726 
1727 Handle<Foreign> Factory::NewForeign(Address addr, PretenureFlag pretenure) {
1728  // Statically ensure that it is safe to allocate foreigns in paged spaces.
1729  STATIC_ASSERT(Foreign::kSize <= kMaxRegularHeapObjectSize);
1730  Map map = *foreign_map();
1731  HeapObject* result =
1732  AllocateRawWithImmortalMap(map->instance_size(), pretenure, map);
1733  Handle<Foreign> foreign(Foreign::cast(result), isolate());
1734  foreign->set_foreign_address(addr);
1735  return foreign;
1736 }
1737 
1738 Handle<ByteArray> Factory::NewByteArray(int length, PretenureFlag pretenure) {
1739  DCHECK_LE(0, length);
1740  if (length > ByteArray::kMaxLength) {
1741  isolate()->heap()->FatalProcessOutOfMemory("invalid array length");
1742  }
1743  int size = ByteArray::SizeFor(length);
1744  HeapObject* result =
1745  AllocateRawWithImmortalMap(size, pretenure, *byte_array_map());
1746  Handle<ByteArray> array(ByteArray::cast(result), isolate());
1747  array->set_length(length);
1748  array->clear_padding();
1749  return array;
1750 }
1751 
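// --- Editorial annotation (not part of factory.cc) --------------------------
// Hedged usage sketch. ByteArray contents are raw bytes, and clear_padding()
// above zeroes the unused tail of the last word so the object is fully
// initialized. `ByteArrayUsageSketch` is an illustrative name, not a V8 API.
static void ByteArrayUsageSketch(Isolate* isolate) {
  Handle<ByteArray> bytes = isolate->factory()->NewByteArray(3, TENURED);
  bytes->set(0, 0xAA);
  DCHECK_EQ(3, bytes->length());
}
// --- End editorial annotation ------------------------------------------------
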
1752 Handle<BytecodeArray> Factory::NewBytecodeArray(
1753  int length, const byte* raw_bytecodes, int frame_size, int parameter_count,
1754  Handle<FixedArray> constant_pool) {
1755  DCHECK_LE(0, length);
1756  if (length > BytecodeArray::kMaxLength) {
1757  isolate()->heap()->FatalProcessOutOfMemory("invalid array length");
1758  }
1759  // Bytecode array is pretenured, so constant pool array should be too.
1760  DCHECK(!Heap::InNewSpace(*constant_pool));
1761 
1762  int size = BytecodeArray::SizeFor(length);
1763  HeapObject* result =
1764  AllocateRawWithImmortalMap(size, TENURED, *bytecode_array_map());
1765  Handle<BytecodeArray> instance(BytecodeArray::cast(result), isolate());
1766  instance->set_length(length);
1767  instance->set_frame_size(frame_size);
1768  instance->set_parameter_count(parameter_count);
1769  instance->set_incoming_new_target_or_generator_register(
1770  interpreter::Register::invalid_value());
1771  instance->set_interrupt_budget(interpreter::Interpreter::InterruptBudget());
1772  instance->set_osr_loop_nesting_level(0);
1773  instance->set_bytecode_age(BytecodeArray::kNoAgeBytecodeAge);
1774  instance->set_constant_pool(*constant_pool);
1775  instance->set_handler_table(*empty_byte_array());
1776  instance->set_source_position_table(*empty_byte_array());
1777  CopyBytes(reinterpret_cast<byte*>(instance->GetFirstBytecodeAddress()),
1778  raw_bytecodes, length);
1779  instance->clear_padding();
1780 
1781  return instance;
1782 }
1783 
1784 Handle<FixedTypedArrayBase> Factory::NewFixedTypedArrayWithExternalPointer(
1785  int length, ExternalArrayType array_type, void* external_pointer,
1786  PretenureFlag pretenure) {
1787  // TODO(7881): Smi length check
1788  DCHECK(0 <= length && length <= Smi::kMaxValue);
1789  int size = FixedTypedArrayBase::kHeaderSize;
1790  HeapObject* result = AllocateRawWithImmortalMap(
1791  size, pretenure,
1792  ReadOnlyRoots(isolate()).MapForFixedTypedArray(array_type));
1793  Handle<FixedTypedArrayBase> elements(FixedTypedArrayBase::cast(result),
1794  isolate());
1795  elements->set_base_pointer(Smi::kZero, SKIP_WRITE_BARRIER);
1796  elements->set_external_pointer(external_pointer, SKIP_WRITE_BARRIER);
1797  elements->set_length(length);
1798  return elements;
1799 }
1800 
1801 Handle<FixedTypedArrayBase> Factory::NewFixedTypedArray(
1802  size_t length, size_t byte_length, ExternalArrayType array_type,
1803  bool initialize, PretenureFlag pretenure) {
1804  // TODO(7881): Smi length check
1805  DCHECK(0 <= length && length <= Smi::kMaxValue);
1806  CHECK(byte_length <= kMaxInt - FixedTypedArrayBase::kDataOffset);
1807  size_t size =
1808  OBJECT_POINTER_ALIGN(byte_length + FixedTypedArrayBase::kDataOffset);
1809  Map map = ReadOnlyRoots(isolate()).MapForFixedTypedArray(array_type);
1810  AllocationAlignment alignment =
1811  array_type == kExternalFloat64Array ? kDoubleAligned : kWordAligned;
1812  HeapObject* object = AllocateRawWithImmortalMap(static_cast<int>(size),
1813  pretenure, map, alignment);
1814 
1815  Handle<FixedTypedArrayBase> elements(FixedTypedArrayBase::cast(object),
1816  isolate());
1817  elements->set_base_pointer(*elements, SKIP_WRITE_BARRIER);
1818  elements->set_external_pointer(
1819  reinterpret_cast<void*>(
1820  ExternalReference::fixed_typed_array_base_data_offset().address()),
1821  SKIP_WRITE_BARRIER);
1822  elements->set_length(static_cast<int>(length));
1823  if (initialize) memset(elements->DataPtr(), 0, elements->DataSize());
1824  return elements;
1825 }
1826 
1827 Handle<Cell> Factory::NewCell(Handle<Object> value) {
1828  AllowDeferredHandleDereference convert_to_cell;
1829  STATIC_ASSERT(Cell::kSize <= kMaxRegularHeapObjectSize);
1830  HeapObject* result =
1831  AllocateRawWithImmortalMap(Cell::kSize, TENURED, *cell_map());
1832  Handle<Cell> cell(Cell::cast(result), isolate());
1833  cell->set_value(*value);
1834  return cell;
1835 }
1836 
1837 Handle<FeedbackCell> Factory::NewNoClosuresCell(Handle<HeapObject> value) {
1838  AllowDeferredHandleDereference convert_to_cell;
1839  HeapObject* result = AllocateRawWithImmortalMap(FeedbackCell::kSize, TENURED,
1840  *no_closures_cell_map());
1841  Handle<FeedbackCell> cell(FeedbackCell::cast(result), isolate());
1842  cell->set_value(*value);
1843  return cell;
1844 }
1845 
1846 Handle<FeedbackCell> Factory::NewOneClosureCell(Handle<HeapObject> value) {
1847  AllowDeferredHandleDereference convert_to_cell;
1848  HeapObject* result = AllocateRawWithImmortalMap(FeedbackCell::kSize, TENURED,
1849  *one_closure_cell_map());
1850  Handle<FeedbackCell> cell(FeedbackCell::cast(result), isolate());
1851  cell->set_value(*value);
1852  return cell;
1853 }
1854 
1855 Handle<FeedbackCell> Factory::NewManyClosuresCell(Handle<HeapObject> value) {
1856  AllowDeferredHandleDereference convert_to_cell;
1857  HeapObject* result = AllocateRawWithImmortalMap(FeedbackCell::kSize, TENURED,
1858  *many_closures_cell_map());
1859  Handle<FeedbackCell> cell(FeedbackCell::cast(result), isolate());
1860  cell->set_value(*value);
1861  return cell;
1862 }
1863 
1864 Handle<FeedbackCell> Factory::NewNoFeedbackCell() {
1865  AllowDeferredHandleDereference convert_to_cell;
1866  HeapObject* result = AllocateRawWithImmortalMap(FeedbackCell::kSize, TENURED,
1867  *no_feedback_cell_map());
1868  Handle<FeedbackCell> cell(FeedbackCell::cast(result), isolate());
1869  // Set the value to undefined. Feedback vectors are never allocated for
1870  // cells with the NoFeedbackCell map type.
1871  cell->set_value(*undefined_value());
1872  return cell;
1873 }
1874 
1875 Handle<PropertyCell> Factory::NewPropertyCell(Handle<Name> name,
1876  PretenureFlag pretenure) {
1877  DCHECK(name->IsUniqueName());
1878  STATIC_ASSERT(PropertyCell::kSize <= kMaxRegularHeapObjectSize);
1879  HeapObject* result = AllocateRawWithImmortalMap(
1880  PropertyCell::kSize, pretenure, *global_property_cell_map());
1881  Handle<PropertyCell> cell(PropertyCell::cast(result), isolate());
1882  cell->set_dependent_code(DependentCode::cast(*empty_weak_fixed_array()),
1883  SKIP_WRITE_BARRIER);
1884  cell->set_property_details(PropertyDetails(Smi::zero()));
1885  cell->set_name(*name);
1886  cell->set_value(*the_hole_value());
1887  return cell;
1888 }
1889 
1890 Handle<DescriptorArray> Factory::NewDescriptorArray(int number_of_descriptors,
1891  int slack) {
1892  int number_of_all_descriptors = number_of_descriptors + slack;
1893  // Zero-length case must be handled outside.
1894  DCHECK_LT(0, number_of_all_descriptors);
1895  int size = DescriptorArray::SizeFor(number_of_all_descriptors);
1896  DCHECK_LT(size, kMaxRegularHeapObjectSize);
1897  HeapObject* obj =
1898  isolate()->heap()->AllocateRawWithRetryOrFail(size, OLD_SPACE);
1899  obj->set_map_after_allocation(*descriptor_array_map(), SKIP_WRITE_BARRIER);
1900  DescriptorArray* array = DescriptorArray::cast(obj);
1901  array->Initialize(*empty_enum_cache(), *undefined_value(),
1902  number_of_descriptors, slack);
1903  return Handle<DescriptorArray>(array, isolate());
1904 }
1905 
1906 Handle<TransitionArray> Factory::NewTransitionArray(int number_of_transitions,
1907  int slack) {
1908  int capacity = TransitionArray::LengthFor(number_of_transitions + slack);
1909  Handle<TransitionArray> array = NewWeakFixedArrayWithMap<TransitionArray>(
1910  RootIndex::kTransitionArrayMap, capacity, TENURED);
1911  // Transition arrays are tenured. When black allocation is on we have to
1912  // add the transition array to the list of encountered_transition_arrays.
1913  Heap* heap = isolate()->heap();
1914  if (heap->incremental_marking()->black_allocation()) {
1915  heap->mark_compact_collector()->AddTransitionArray(*array);
1916  }
1917  array->WeakFixedArray::Set(TransitionArray::kPrototypeTransitionsIndex,
1918  MaybeObject::FromObject(Smi::kZero));
1919  array->WeakFixedArray::Set(
1920  TransitionArray::kTransitionLengthIndex,
1921  MaybeObject::FromObject(Smi::FromInt(number_of_transitions)));
1922  return array;
1923 }
1924 
1925 Handle<AllocationSite> Factory::NewAllocationSite(bool with_weak_next) {
1926  Handle<Map> map = with_weak_next ? allocation_site_map()
1927  : allocation_site_without_weaknext_map();
1928  Handle<AllocationSite> site(AllocationSite::cast(New(map, TENURED)),
1929  isolate());
1930  site->Initialize();
1931 
1932  if (with_weak_next) {
1933  // Link the site
1934  site->set_weak_next(isolate()->heap()->allocation_sites_list());
1935  isolate()->heap()->set_allocation_sites_list(*site);
1936  }
1937  return site;
1938 }
1939 
1940 Handle<Map> Factory::NewMap(InstanceType type, int instance_size,
1941  ElementsKind elements_kind,
1942  int inobject_properties) {
1943  STATIC_ASSERT(LAST_JS_OBJECT_TYPE == LAST_TYPE);
1944  DCHECK_IMPLIES(InstanceTypeChecker::IsJSObject(type) &&
1945  !Map::CanHaveFastTransitionableElementsKind(type),
1946  IsDictionaryElementsKind(elements_kind) ||
1947  IsTerminalElementsKind(elements_kind));
1948  HeapObject* result =
1949  isolate()->heap()->AllocateRawWithRetryOrFail(Map::kSize, MAP_SPACE);
1950  result->set_map_after_allocation(*meta_map(), SKIP_WRITE_BARRIER);
1951  return handle(InitializeMap(Map::cast(result), type, instance_size,
1952  elements_kind, inobject_properties),
1953  isolate());
1954 }
1955 
1956 Map Factory::InitializeMap(Map map, InstanceType type, int instance_size,
1957  ElementsKind elements_kind,
1958  int inobject_properties) {
1959  map->set_instance_type(type);
1960  map->set_prototype(*null_value(), SKIP_WRITE_BARRIER);
1961  map->set_constructor_or_backpointer(*null_value(), SKIP_WRITE_BARRIER);
1962  map->set_instance_size(instance_size);
1963  if (map->IsJSObjectMap()) {
1964  DCHECK(!isolate()->heap()->InReadOnlySpace(map));
1965  map->SetInObjectPropertiesStartInWords(instance_size / kPointerSize -
1966  inobject_properties);
1967  DCHECK_EQ(map->GetInObjectProperties(), inobject_properties);
1968  map->set_prototype_validity_cell(*invalid_prototype_validity_cell());
1969  } else {
1970  DCHECK_EQ(inobject_properties, 0);
1971  map->set_inobject_properties_start_or_constructor_function_index(0);
1972  map->set_prototype_validity_cell(Smi::FromInt(Map::kPrototypeChainValid));
1973  }
1974  map->set_dependent_code(DependentCode::cast(*empty_weak_fixed_array()),
1975  SKIP_WRITE_BARRIER);
1976  map->set_raw_transitions(MaybeObject::FromSmi(Smi::zero()));
1977  map->SetInObjectUnusedPropertyFields(inobject_properties);
1978  map->set_instance_descriptors(*empty_descriptor_array());
1979  if (FLAG_unbox_double_fields) {
1980  map->set_layout_descriptor(LayoutDescriptor::FastPointerLayout());
1981  }
1982  // Must be called only after |instance_type|, |instance_size| and
1983  // |layout_descriptor| are set.
1984  map->set_visitor_id(Map::GetVisitorId(map));
1985  map->set_bit_field(0);
1986  map->set_bit_field2(Map::IsExtensibleBit::kMask);
1987  DCHECK(!map->is_in_retained_map_list());
1988  int bit_field3 = Map::EnumLengthBits::encode(kInvalidEnumCacheSentinel) |
1989  Map::OwnsDescriptorsBit::encode(true) |
1990  Map::ConstructionCounterBits::encode(Map::kNoSlackTracking);
1991  map->set_bit_field3(bit_field3);
1992  map->set_elements_kind(elements_kind);
1993  map->set_new_target_is_base(true);
1994  isolate()->counters()->maps_created()->Increment();
1995  if (FLAG_trace_maps) LOG(isolate(), MapCreate(map));
1996  return map;
1997 }
1998 
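// --- Editorial annotation (not part of factory.cc) --------------------------
// Hedged usage sketch for NewMap/InitializeMap above: a plain JSObject map
// with two in-object properties, whose instance size is the standard JSObject
// header plus one word per in-object property. `MapUsageSketch` is an
// illustrative name, not a V8 API.
static void MapUsageSketch(Factory* factory) {
  Handle<Map> map =
      factory->NewMap(JS_OBJECT_TYPE, JSObject::kHeaderSize + 2 * kPointerSize,
                      TERMINAL_FAST_ELEMENTS_KIND, 2);
  DCHECK_EQ(2, map->GetInObjectProperties());
  USE(map);
}
// --- End editorial annotation ------------------------------------------------
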
1999 Handle<JSObject> Factory::CopyJSObject(Handle<JSObject> source) {
2000  return CopyJSObjectWithAllocationSite(source, Handle<AllocationSite>());
2001 }
2002 
2003 Handle<JSObject> Factory::CopyJSObjectWithAllocationSite(
2004  Handle<JSObject> source, Handle<AllocationSite> site) {
2005  Handle<Map> map(source->map(), isolate());
2006 
2007  // We can only clone regexps, normal objects, api objects, errors or arrays.
2008  // Copying anything else will break invariants.
2009  CHECK(map->instance_type() == JS_REGEXP_TYPE ||
2010  map->instance_type() == JS_OBJECT_TYPE ||
2011  map->instance_type() == JS_ERROR_TYPE ||
2012  map->instance_type() == JS_ARRAY_TYPE ||
2013  map->instance_type() == JS_API_OBJECT_TYPE ||
2014  map->instance_type() == WASM_GLOBAL_TYPE ||
2015  map->instance_type() == WASM_INSTANCE_TYPE ||
2016  map->instance_type() == WASM_MEMORY_TYPE ||
2017  map->instance_type() == WASM_MODULE_TYPE ||
2018  map->instance_type() == WASM_TABLE_TYPE ||
2019  map->instance_type() == JS_SPECIAL_API_OBJECT_TYPE);
2020  DCHECK(site.is_null() || AllocationSite::CanTrack(map->instance_type()));
2021 
2022  int object_size = map->instance_size();
2023  int adjusted_object_size =
2024  site.is_null() ? object_size : object_size + AllocationMemento::kSize;
2025  HeapObject* raw_clone = isolate()->heap()->AllocateRawWithRetryOrFail(
2026  adjusted_object_size, NEW_SPACE);
2027 
2028  SLOW_DCHECK(Heap::InNewSpace(raw_clone));
2029  // Since we know the clone is allocated in new space, we can copy
2030  // the contents without worrying about updating the write barrier.
2031  Heap::CopyBlock(raw_clone->address(), source->address(), object_size);
2032  Handle<JSObject> clone(JSObject::cast(raw_clone), isolate());
2033 
2034  if (!site.is_null()) {
2035  AllocationMemento* alloc_memento = reinterpret_cast<AllocationMemento*>(
2036  reinterpret_cast<Address>(raw_clone) + object_size);
2037  InitializeAllocationMemento(alloc_memento, *site);
2038  }
2039 
2040  SLOW_DCHECK(clone->GetElementsKind() == source->GetElementsKind());
2041  FixedArrayBase elements = source->elements();
2042  // Update elements if necessary.
2043  if (elements->length() > 0) {
2044  FixedArrayBase elem;
2045  if (elements->map() == *fixed_cow_array_map()) {
2046  elem = elements;
2047  } else if (source->HasDoubleElements()) {
2048  elem = *CopyFixedDoubleArray(
2049  handle(FixedDoubleArray::cast(elements), isolate()));
2050  } else {
2051  elem = *CopyFixedArray(handle(FixedArray::cast(elements), isolate()));
2052  }
2053  clone->set_elements(elem);
2054  }
2055 
2056  // Update properties if necessary.
2057  if (source->HasFastProperties()) {
2058  PropertyArray properties = source->property_array();
2059  if (properties->length() > 0) {
2060  // TODO(gsathya): Do not copy hash code.
2061  Handle<PropertyArray> prop = CopyArrayWithMap(
2062  handle(properties, isolate()), handle(properties->map(), isolate()));
2063  clone->set_raw_properties_or_hash(*prop);
2064  }
2065  } else {
2066  Handle<FixedArray> properties(
2067  FixedArray::cast(source->property_dictionary()), isolate());
2068  Handle<FixedArray> prop = CopyFixedArray(properties);
2069  clone->set_raw_properties_or_hash(*prop);
2070  }
2071  return clone;
2072 }
2073 
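// --- Editorial annotation (not part of factory.cc) --------------------------
// Layout note for CopyJSObjectWithAllocationSite above: when a site is given,
// the clone is allocated with AllocationMemento::kSize extra bytes and the
// memento is written immediately behind the copied object. Only the original
// object_size bytes are copied; the memento itself is initialized separately
// via InitializeAllocationMemento.
// --- End editorial annotation ------------------------------------------------
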
2074 namespace {
2075 template <typename T>
2076 void initialize_length(Handle<T> array, int length) {
2077  array->set_length(length);
2078 }
2079 
2080 template <>
2081 void initialize_length<PropertyArray>(Handle<PropertyArray> array, int length) {
2082  array->initialize_length(length);
2083 }
2084 
2085 } // namespace
2086 
2087 template <typename T>
2088 Handle<T> Factory::CopyArrayWithMap(Handle<T> src, Handle<Map> map) {
2089  int len = src->length();
2090  HeapObject* obj = AllocateRawFixedArray(len, NOT_TENURED);
2091  obj->set_map_after_allocation(*map, SKIP_WRITE_BARRIER);
2092 
2093  Handle<T> result(T::cast(obj), isolate());
2094  DisallowHeapAllocation no_gc;
2095  WriteBarrierMode mode = result->GetWriteBarrierMode(no_gc);
2096 
2097  if (mode == SKIP_WRITE_BARRIER) {
2098  // Eliminate the write barrier if possible.
2099  Heap::CopyBlock(obj->address() + kPointerSize,
2100  src->address() + kPointerSize,
2101  T::SizeFor(len) - kPointerSize);
2102  } else {
2103  // Slow case: Just copy the content one-by-one.
2104  initialize_length(result, len);
2105  for (int i = 0; i < len; i++) result->set(i, src->get(i), mode);
2106  }
2107  return result;
2108 }
2109 
2110 template <typename T>
2111 Handle<T> Factory::CopyArrayAndGrow(Handle<T> src, int grow_by,
2112  PretenureFlag pretenure) {
2113  DCHECK_LT(0, grow_by);
2114  DCHECK_LE(grow_by, kMaxInt - src->length());
2115  int old_len = src->length();
2116  int new_len = old_len + grow_by;
2117  HeapObject* obj = AllocateRawFixedArray(new_len, pretenure);
2118  obj->set_map_after_allocation(src->map(), SKIP_WRITE_BARRIER);
2119 
2120  Handle<T> result(T::cast(obj), isolate());
2121  initialize_length(result, new_len);
2122 
2123  // Copy the content.
2124  DisallowHeapAllocation no_gc;
2125  WriteBarrierMode mode = obj->GetWriteBarrierMode(no_gc);
2126  for (int i = 0; i < old_len; i++) result->set(i, src->get(i), mode);
2127  MemsetPointer(result->data_start() + old_len, *undefined_value(), grow_by);
2128  return result;
2129 }
2130 
2131 Handle<FixedArray> Factory::CopyFixedArrayWithMap(Handle<FixedArray> array,
2132  Handle<Map> map) {
2133  return CopyArrayWithMap(array, map);
2134 }
2135 
2136 Handle<FixedArray> Factory::CopyFixedArrayAndGrow(Handle<FixedArray> array,
2137  int grow_by,
2138  PretenureFlag pretenure) {
2139  return CopyArrayAndGrow(array, grow_by, pretenure);
2140 }
2141 
2142 Handle<WeakFixedArray> Factory::CopyWeakFixedArrayAndGrow(
2143  Handle<WeakFixedArray> src, int grow_by, PretenureFlag pretenure) {
2144  DCHECK(
2145  !src->IsTransitionArray()); // Compacted by GC; this code doesn't work for them.
2146  int old_len = src->length();
2147  int new_len = old_len + grow_by;
2148  DCHECK_GE(new_len, old_len);
2149  HeapObject* obj = AllocateRawFixedArray(new_len, pretenure);
2150  DCHECK_EQ(old_len, src->length());
2151  obj->set_map_after_allocation(src->map(), SKIP_WRITE_BARRIER);
2152 
2153  WeakFixedArray* result = WeakFixedArray::cast(obj);
2154  result->set_length(new_len);
2155 
2156  // Copy the content.
2157  DisallowHeapAllocation no_gc;
2158  WriteBarrierMode mode = obj->GetWriteBarrierMode(no_gc);
2159  for (int i = 0; i < old_len; i++) result->Set(i, src->Get(i), mode);
2160  MemsetPointer(ObjectSlot(result->RawFieldOfElementAt(old_len)),
2161  ReadOnlyRoots(isolate()).undefined_value(), grow_by);
2162  return Handle<WeakFixedArray>(result, isolate());
2163 }
2164 
2165 Handle<WeakArrayList> Factory::CopyWeakArrayListAndGrow(
2166  Handle<WeakArrayList> src, int grow_by, PretenureFlag pretenure) {
2167  int old_capacity = src->capacity();
2168  int new_capacity = old_capacity + grow_by;
2169  DCHECK_GE(new_capacity, old_capacity);
2170  HeapObject* obj = AllocateRawWeakArrayList(new_capacity, pretenure);
2171  obj->set_map_after_allocation(src->map(), SKIP_WRITE_BARRIER);
2172 
2173  WeakArrayList* result = WeakArrayList::cast(obj);
2174  result->set_length(src->length());
2175  result->set_capacity(new_capacity);
2176 
2177  // Copy the content.
2178  DisallowHeapAllocation no_gc;
2179  WriteBarrierMode mode = obj->GetWriteBarrierMode(no_gc);
2180  for (int i = 0; i < old_capacity; i++) result->Set(i, src->Get(i), mode);
2181  MemsetPointer(ObjectSlot(result->data_start() + old_capacity),
2182  ReadOnlyRoots(isolate()).undefined_value(), grow_by);
2183  return Handle<WeakArrayList>(result, isolate());
2184 }
2185 
2186 Handle<PropertyArray> Factory::CopyPropertyArrayAndGrow(
2187  Handle<PropertyArray> array, int grow_by, PretenureFlag pretenure) {
2188  return CopyArrayAndGrow(array, grow_by, pretenure);
2189 }
2190 
2191 Handle<FixedArray> Factory::CopyFixedArrayUpTo(Handle<FixedArray> array,
2192  int new_len,
2193  PretenureFlag pretenure) {
2194  DCHECK_LE(0, new_len);
2195  DCHECK_LE(new_len, array->length());
2196  if (new_len == 0) return empty_fixed_array();
2197 
2198  HeapObject* obj = AllocateRawFixedArray(new_len, pretenure);
2199  obj->set_map_after_allocation(*fixed_array_map(), SKIP_WRITE_BARRIER);
2200  Handle<FixedArray> result(FixedArray::cast(obj), isolate());
2201  result->set_length(new_len);
2202 
2203  // Copy the content.
2204  DisallowHeapAllocation no_gc;
2205  WriteBarrierMode mode = result->GetWriteBarrierMode(no_gc);
2206  for (int i = 0; i < new_len; i++) result->set(i, array->get(i), mode);
2207  return result;
2208 }
2209 
2210 Handle<FixedArray> Factory::CopyFixedArray(Handle<FixedArray> array) {
2211  if (array->length() == 0) return array;
2212  return CopyArrayWithMap(array, handle(array->map(), isolate()));
2213 }
2214 
2215 Handle<FixedArray> Factory::CopyAndTenureFixedCOWArray(
2216  Handle<FixedArray> array) {
2217  DCHECK(Heap::InNewSpace(*array));
2218  Handle<FixedArray> result =
2219  CopyFixedArrayUpTo(array, array->length(), TENURED);
2220 
2221  // TODO(mvstanton): The map is set twice because of protection against calling
2222  // set() on a COW FixedArray. Issue v8:3221 was created to track this; once
2223  // it is fixed we might be able to remove this whole method.
2224  result->set_map_after_allocation(*fixed_cow_array_map(), SKIP_WRITE_BARRIER);
2225  return result;
2226 }
2227 
2228 Handle<FixedDoubleArray> Factory::CopyFixedDoubleArray(
2229  Handle<FixedDoubleArray> array) {
2230  int len = array->length();
2231  if (len == 0) return array;
2232  Handle<FixedDoubleArray> result =
2233  Handle<FixedDoubleArray>::cast(NewFixedDoubleArray(len, NOT_TENURED));
2234  Heap::CopyBlock(
2235  result->address() + FixedDoubleArray::kLengthOffset,
2236  array->address() + FixedDoubleArray::kLengthOffset,
2237  FixedDoubleArray::SizeFor(len) - FixedDoubleArray::kLengthOffset);
2238  return result;
2239 }
2240 
2241 Handle<FeedbackVector> Factory::CopyFeedbackVector(
2242  Handle<FeedbackVector> array) {
2243  int len = array->length();
2244  HeapObject* obj = AllocateRawWithImmortalMap(
2245  FeedbackVector::SizeFor(len), NOT_TENURED, *feedback_vector_map());
2246  Handle<FeedbackVector> result(FeedbackVector::cast(obj), isolate());
2247 
2248  DisallowHeapAllocation no_gc;
2249  WriteBarrierMode mode = result->GetWriteBarrierMode(no_gc);
2250 
2251  // Eliminate the write barrier if possible.
2252  if (mode == SKIP_WRITE_BARRIER) {
2253  Heap::CopyBlock(result->address() + kPointerSize,
2254  array->address() + kPointerSize,
2255  FeedbackVector::SizeFor(len) - kPointerSize);
2256  } else {
2257  // Slow case: Just copy the content one-by-one.
2258  result->set_shared_function_info(array->shared_function_info());
2259  result->set_optimized_code_weak_or_smi(array->optimized_code_weak_or_smi());
2260  result->set_invocation_count(array->invocation_count());
2261  result->set_profiler_ticks(array->profiler_ticks());
2262  result->set_deopt_count(array->deopt_count());
2263  for (int i = 0; i < len; i++) result->set(i, array->get(i), mode);
2264  }
2265  return result;
2266 }
2267 
2268 Handle<Object> Factory::NewNumber(double value, PretenureFlag pretenure) {
2269  // Materialize as a SMI if possible.
2270  int32_t int_value;
2271  if (DoubleToSmiInteger(value, &int_value)) {
2272  return handle(Smi::FromInt(int_value), isolate());
2273  }
2274  return NewHeapNumber(value, pretenure);
2275 }
2276 
2277 Handle<Object> Factory::NewNumberFromInt(int32_t value,
2278  PretenureFlag pretenure) {
2279  if (Smi::IsValid(value)) return handle(Smi::FromInt(value), isolate());
2280  // Bypass NewNumber to avoid various redundant checks.
2281  return NewHeapNumber(FastI2D(value), pretenure);
2282 }
2283 
2284 Handle<Object> Factory::NewNumberFromUint(uint32_t value,
2285  PretenureFlag pretenure) {
2286  int32_t int32v = static_cast<int32_t>(value);
2287  if (int32v >= 0 && Smi::IsValid(int32v)) {
2288  return handle(Smi::FromInt(int32v), isolate());
2289  }
2290  return NewHeapNumber(FastUI2D(value), pretenure);
2291 }
2292 
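// --- Editorial annotation (not part of factory.cc) --------------------------
// Hedged usage sketch for the number constructors above: values representable
// as a Smi never allocate a heap object, everything else becomes a HeapNumber.
// `NumberUsageSketch` is an illustrative name, not a V8 API.
static void NumberUsageSketch(Isolate* isolate) {
  Factory* factory = isolate->factory();
  Handle<Object> a = factory->NewNumber(42.0, NOT_TENURED);  // becomes a Smi
  Handle<Object> b = factory->NewNumber(0.5, NOT_TENURED);   // HeapNumber
  Handle<Object> c =
      factory->NewNumberFromUint(0x80000000u, NOT_TENURED);  // exceeds Smi range
  DCHECK(a->IsSmi());
  DCHECK(b->IsHeapNumber());
  DCHECK(c->IsHeapNumber());
  USE(a); USE(b); USE(c);
}
// --- End editorial annotation ------------------------------------------------
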
2293 Handle<HeapNumber> Factory::NewHeapNumber(PretenureFlag pretenure) {
2294  STATIC_ASSERT(HeapNumber::kSize <= kMaxRegularHeapObjectSize);
2295  Map map = *heap_number_map();
2296  HeapObject* result = AllocateRawWithImmortalMap(HeapNumber::kSize, pretenure,
2297  map, kDoubleUnaligned);
2298  return handle(HeapNumber::cast(result), isolate());
2299 }
2300 
2301 Handle<MutableHeapNumber> Factory::NewMutableHeapNumber(
2302  PretenureFlag pretenure) {
2303  STATIC_ASSERT(HeapNumber::kSize <= kMaxRegularHeapObjectSize);
2304  Map map = *mutable_heap_number_map();
2305  HeapObject* result = AllocateRawWithImmortalMap(
2306  MutableHeapNumber::kSize, pretenure, map, kDoubleUnaligned);
2307  return handle(MutableHeapNumber::cast(result), isolate());
2308 }
2309 
2310 Handle<FreshlyAllocatedBigInt> Factory::NewBigInt(int length,
2311  PretenureFlag pretenure) {
2312  if (length < 0 || length > BigInt::kMaxLength) {
2313  isolate()->heap()->FatalProcessOutOfMemory("invalid BigInt length");
2314  }
2315  HeapObject* result = AllocateRawWithImmortalMap(BigInt::SizeFor(length),
2316  pretenure, *bigint_map());
2317  return handle(FreshlyAllocatedBigInt::cast(result), isolate());
2318 }
2319 
2320 Handle<Object> Factory::NewError(Handle<JSFunction> constructor,
2321  MessageTemplate template_index,
2322  Handle<Object> arg0, Handle<Object> arg1,
2323  Handle<Object> arg2) {
2324  HandleScope scope(isolate());
2325  if (isolate()->bootstrapper()->IsActive()) {
2326  // During bootstrapping we cannot construct error objects.
2327  return scope.CloseAndEscape(NewStringFromAsciiChecked(
2328  MessageFormatter::TemplateString(template_index)));
2329  }
2330 
2331  if (arg0.is_null()) arg0 = undefined_value();
2332  if (arg1.is_null()) arg1 = undefined_value();
2333  if (arg2.is_null()) arg2 = undefined_value();
2334 
2335  Handle<Object> result;
2336  if (!ErrorUtils::MakeGenericError(isolate(), constructor, template_index,
2337  arg0, arg1, arg2, SKIP_NONE)
2338  .ToHandle(&result)) {
2339  // If an exception is thrown while running the factory method, use the
2340  // exception as the result.
2341  DCHECK(isolate()->has_pending_exception());
2342  result = handle(isolate()->pending_exception(), isolate());
2343  isolate()->clear_pending_exception();
2344  }
2345 
2346  return scope.CloseAndEscape(result);
2347 }
2348 
2349 Handle<Object> Factory::NewError(Handle<JSFunction> constructor,
2350  Handle<String> message) {
2351  // Construct a new error object. If an exception is thrown, use the exception
2352  // as the result.
2353 
2354  Handle<Object> no_caller;
2355  MaybeHandle<Object> maybe_error =
2356  ErrorUtils::Construct(isolate(), constructor, constructor, message,
2357  SKIP_NONE, no_caller, false);
2358  if (maybe_error.is_null()) {
2359  DCHECK(isolate()->has_pending_exception());
2360  maybe_error = handle(isolate()->pending_exception(), isolate());
2361  isolate()->clear_pending_exception();
2362  }
2363 
2364  return maybe_error.ToHandleChecked();
2365 }
2366 
2367 Handle<Object> Factory::NewInvalidStringLengthError() {
2368  if (FLAG_abort_on_stack_or_string_length_overflow) {
2369  FATAL("Aborting on invalid string length");
2370  }
2371  // Invalidate the "string length" protector.
2372  if (isolate()->IsStringLengthOverflowIntact()) {
2373  isolate()->InvalidateStringLengthOverflowProtector();
2374  }
2375  return NewRangeError(MessageTemplate::kInvalidStringLength);
2376 }
2377 
2378 #define DEFINE_ERROR(NAME, name) \
2379  Handle<Object> Factory::New##NAME(MessageTemplate template_index, \
2380  Handle<Object> arg0, Handle<Object> arg1, \
2381  Handle<Object> arg2) { \
2382  return NewError(isolate()->name##_function(), template_index, arg0, arg1, \
2383  arg2); \
2384  }
2385 DEFINE_ERROR(Error, error)
2386 DEFINE_ERROR(EvalError, eval_error)
2387 DEFINE_ERROR(RangeError, range_error)
2388 DEFINE_ERROR(ReferenceError, reference_error)
2389 DEFINE_ERROR(SyntaxError, syntax_error)
2390 DEFINE_ERROR(TypeError, type_error)
2391 DEFINE_ERROR(WasmCompileError, wasm_compile_error)
2392 DEFINE_ERROR(WasmLinkError, wasm_link_error)
2393 DEFINE_ERROR(WasmRuntimeError, wasm_runtime_error)
2394 #undef DEFINE_ERROR
2395 
2396 Handle<JSFunction> Factory::NewFunction(Handle<Map> map,
2397  Handle<SharedFunctionInfo> info,
2398  Handle<Context> context,
2399  PretenureFlag pretenure) {
2400  Handle<JSFunction> function(JSFunction::cast(New(map, pretenure)), isolate());
2401 
2402  function->initialize_properties();
2403  function->initialize_elements();
2404  function->set_shared(*info);
2405  function->set_code(info->GetCode());
2406  function->set_context(*context);
2407  function->set_feedback_cell(*many_closures_cell());
2408  int header_size;
2409  if (map->has_prototype_slot()) {
2410  header_size = JSFunction::kSizeWithPrototype;
2411  function->set_prototype_or_initial_map(*the_hole_value());
2412  } else {
2413  header_size = JSFunction::kSizeWithoutPrototype;
2414  }
2415  InitializeJSObjectBody(function, map, header_size);
2416  return function;
2417 }
2418 
2419 Handle<JSFunction> Factory::NewFunctionForTest(Handle<String> name) {
2420  NewFunctionArgs args = NewFunctionArgs::ForFunctionWithoutCode(
2421  name, isolate()->sloppy_function_map(), LanguageMode::kSloppy);
2422  Handle<JSFunction> result = NewFunction(args);
2423  DCHECK(is_sloppy(result->shared()->language_mode()));
2424  return result;
2425 }
2426 
2427 Handle<JSFunction> Factory::NewFunction(const NewFunctionArgs& args) {
2428  DCHECK(!args.name_.is_null());
2429 
2430  // Create the SharedFunctionInfo.
2431  Handle<NativeContext> context(isolate()->native_context());
2432  Handle<Map> map = args.GetMap(isolate());
2433  Handle<SharedFunctionInfo> info =
2434  NewSharedFunctionInfo(args.name_, args.maybe_exported_function_data_,
2435  args.maybe_builtin_id_, kNormalFunction);
2436 
2437  // Proper language mode in shared function info will be set later.
2438  DCHECK(is_sloppy(info->language_mode()));
2439  DCHECK(!map->IsUndefined(isolate()));
2440 
2441 #ifdef DEBUG
2442  if (isolate()->bootstrapper()->IsActive()) {
2443  Handle<Code> code;
2444  DCHECK(
2445  // During bootstrapping some of these maps might not be created yet.
2446  (*map == context->get(Context::STRICT_FUNCTION_MAP_INDEX)) ||
2447  (*map ==
2448  context->get(Context::STRICT_FUNCTION_WITHOUT_PROTOTYPE_MAP_INDEX)) ||
2449  (*map ==
2450  context->get(
2451  Context::STRICT_FUNCTION_WITH_READONLY_PROTOTYPE_MAP_INDEX)) ||
2452  // Check if it's a creation of an empty or Proxy function during
2453  // bootstrapping.
2454  (args.maybe_builtin_id_ == Builtins::kEmptyFunction ||
2455  args.maybe_builtin_id_ == Builtins::kProxyConstructor));
2456  } else {
2457  DCHECK(
2458  (*map == *isolate()->sloppy_function_map()) ||
2459  (*map == *isolate()->sloppy_function_without_prototype_map()) ||
2460  (*map == *isolate()->sloppy_function_with_readonly_prototype_map()) ||
2461  (*map == *isolate()->strict_function_map()) ||
2462  (*map == *isolate()->strict_function_without_prototype_map()) ||
2463  (*map == *isolate()->native_function_map()));
2464  }
2465 #endif
2466 
2467  Handle<JSFunction> result = NewFunction(map, info, context);
2468 
2469  if (args.should_set_prototype_) {
2470  result->set_prototype_or_initial_map(
2471  *args.maybe_prototype_.ToHandleChecked());
2472  }
2473 
2474  if (args.should_set_language_mode_) {
2475  result->shared()->set_language_mode(args.language_mode_);
2476  }
2477 
2478  if (args.should_create_and_set_initial_map_) {
2479  ElementsKind elements_kind;
2480  switch (args.type_) {
2481  case JS_ARRAY_TYPE:
2482  elements_kind = PACKED_SMI_ELEMENTS;
2483  break;
2484  case JS_ARGUMENTS_TYPE:
2485  elements_kind = PACKED_ELEMENTS;
2486  break;
2487  default:
2488  elements_kind = TERMINAL_FAST_ELEMENTS_KIND;
2489  break;
2490  }
2491  Handle<Map> initial_map = NewMap(args.type_, args.instance_size_,
2492  elements_kind, args.inobject_properties_);
2493  result->shared()->set_expected_nof_properties(args.inobject_properties_);
2494  // TODO(littledan): Why do we have this is_generator test when
2495  // NewFunctionPrototype already handles finding an appropriately
2496  // shared prototype?
2497  Handle<Object> prototype = args.maybe_prototype_.ToHandleChecked();
2498  if (!IsResumableFunction(result->shared()->kind())) {
2499  if (prototype->IsTheHole(isolate())) {
2500  prototype = NewFunctionPrototype(result);
2501  }
2502  }
2503  JSFunction::SetInitialMap(result, initial_map, prototype);
2504  }
2505 
2506  return result;
2507 }
2508 
2509 Handle<JSObject> Factory::NewFunctionPrototype(Handle<JSFunction> function) {
2510  // Make sure to use globals from the function's context, since the function
2511  // can be from a different context.
2512  Handle<NativeContext> native_context(function->context()->native_context(),
2513  isolate());
2514  Handle<Map> new_map;
2515  if (V8_UNLIKELY(IsAsyncGeneratorFunction(function->shared()->kind()))) {
2516  new_map = handle(native_context->async_generator_object_prototype_map(),
2517  isolate());
2518  } else if (IsResumableFunction(function->shared()->kind())) {
2519  // Generator and async function prototypes can share maps since they
2520  // don't have "constructor" properties.
2521  new_map =
2522  handle(native_context->generator_object_prototype_map(), isolate());
2523  } else {
2524  // Each function prototype gets a fresh map to avoid unwanted sharing of
2525  // maps between prototypes of different constructors.
2526  Handle<JSFunction> object_function(native_context->object_function(),
2527  isolate());
2528  DCHECK(object_function->has_initial_map());
2529  new_map = handle(object_function->initial_map(), isolate());
2530  }
2531 
2532  DCHECK(!new_map->is_prototype_map());
2533  Handle<JSObject> prototype = NewJSObjectFromMap(new_map);
2534 
2535  if (!IsResumableFunction(function->shared()->kind())) {
2536  JSObject::AddProperty(isolate(), prototype, constructor_string(), function,
2537  DONT_ENUM);
2538  }
2539 
2540  return prototype;
2541 }
2542 
2543 Handle<JSFunction> Factory::NewFunctionFromSharedFunctionInfo(
2544  Handle<SharedFunctionInfo> info, Handle<Context> context,
2545  PretenureFlag pretenure) {
2546  Handle<Map> initial_map(
2547  Map::cast(context->native_context()->get(info->function_map_index())),
2548  isolate());
2549  return NewFunctionFromSharedFunctionInfo(initial_map, info, context,
2550  pretenure);
2551 }
2552 
2553 Handle<JSFunction> Factory::NewFunctionFromSharedFunctionInfo(
2554  Handle<SharedFunctionInfo> info, Handle<Context> context,
2555  Handle<FeedbackCell> feedback_cell, PretenureFlag pretenure) {
2556  Handle<Map> initial_map(
2557  Map::cast(context->native_context()->get(info->function_map_index())),
2558  isolate());
2559  return NewFunctionFromSharedFunctionInfo(initial_map, info, context,
2560  feedback_cell, pretenure);
2561 }
2562 
2563 Handle<JSFunction> Factory::NewFunctionFromSharedFunctionInfo(
2564  Handle<Map> initial_map, Handle<SharedFunctionInfo> info,
2565  Handle<Context> context, PretenureFlag pretenure) {
2566  DCHECK_EQ(JS_FUNCTION_TYPE, initial_map->instance_type());
2567  Handle<JSFunction> result =
2568  NewFunction(initial_map, info, context, pretenure);
2569 
2570  // Give compiler a chance to pre-initialize.
2571  Compiler::PostInstantiation(result, pretenure);
2572 
2573  return result;
2574 }
2575 
2576 Handle<JSFunction> Factory::NewFunctionFromSharedFunctionInfo(
2577  Handle<Map> initial_map, Handle<SharedFunctionInfo> info,
2578  Handle<Context> context, Handle<FeedbackCell> feedback_cell,
2579  PretenureFlag pretenure) {
2580  DCHECK_EQ(JS_FUNCTION_TYPE, initial_map->instance_type());
2581  Handle<JSFunction> result =
2582  NewFunction(initial_map, info, context, pretenure);
2583 
2584  // Bump the closure count that is encoded in the feedback cell's map.
2585  if (feedback_cell->map() == *no_closures_cell_map()) {
2586  feedback_cell->set_map(*one_closure_cell_map());
2587  } else if (feedback_cell->map() == *one_closure_cell_map()) {
2588  feedback_cell->set_map(*many_closures_cell_map());
2589  } else {
2590  DCHECK(feedback_cell->map() == *no_feedback_cell_map() ||
2591  feedback_cell->map() == *many_closures_cell_map());
2592  }
2593 
2594  // Check that the optimized code in the feedback cell wasn't marked for
2595  // deoptimization while not pointed to by any live JSFunction.
2596  if (feedback_cell->value()->IsFeedbackVector()) {
2597  FeedbackVector::cast(feedback_cell->value())
2598  ->EvictOptimizedCodeMarkedForDeoptimization(
2599  *info, "new function from shared function info");
2600  }
2601  result->set_feedback_cell(*feedback_cell);
2602 
2603  // Give compiler a chance to pre-initialize.
2604  Compiler::PostInstantiation(result, pretenure);
2605 
2606  return result;
2607 }
2608 
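// --- Editorial annotation (not part of factory.cc) --------------------------
// The feedback cell map acts as a small closure counter that only moves
// forward: no_closures_cell_map -> one_closure_cell_map ->
// many_closures_cell_map (no_feedback_cell_map is left untouched). This is how
// the runtime distinguishes "exactly one closure" from "many" without storing
// a separate counter field in the cell.
// --- End editorial annotation ------------------------------------------------
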
2609 Handle<ScopeInfo> Factory::NewScopeInfo(int length) {
2610  return NewFixedArrayWithMap<ScopeInfo>(RootIndex::kScopeInfoMap, length,
2611  TENURED);
2612 }
2613 
2614 Handle<ModuleInfo> Factory::NewModuleInfo() {
2615  return NewFixedArrayWithMap<ModuleInfo>(RootIndex::kModuleInfoMap,
2616  ModuleInfo::kLength, TENURED);
2617 }
2618 
2619 Handle<PreParsedScopeData> Factory::NewPreParsedScopeData(int length) {
2620  int size = PreParsedScopeData::SizeFor(length);
2621  Handle<PreParsedScopeData> result(
2622  PreParsedScopeData::cast(AllocateRawWithImmortalMap(
2623  size, TENURED, *pre_parsed_scope_data_map())),
2624  isolate());
2625  result->set_scope_data(PodArray<uint8_t>::cast(*empty_byte_array()));
2626  result->set_length(length);
2627  MemsetPointer(result->child_data_start(), *null_value(), length);
2628 
2629  result->clear_padding();
2630  return result;
2631 }
2632 
2633 Handle<UncompiledDataWithoutPreParsedScope>
2634 Factory::NewUncompiledDataWithoutPreParsedScope(Handle<String> inferred_name,
2635  int32_t start_position,
2636  int32_t end_position,
2637  int32_t function_literal_id) {
2638  Handle<UncompiledDataWithoutPreParsedScope> result(
2639  UncompiledDataWithoutPreParsedScope::cast(
2640  New(uncompiled_data_without_pre_parsed_scope_map(), TENURED)),
2641  isolate());
2642  result->set_inferred_name(*inferred_name);
2643  result->set_start_position(start_position);
2644  result->set_end_position(end_position);
2645  result->set_function_literal_id(function_literal_id);
2646 
2647  result->clear_padding();
2648  return result;
2649 }
2650 
2651 Handle<UncompiledDataWithPreParsedScope>
2652 Factory::NewUncompiledDataWithPreParsedScope(
2653  Handle<String> inferred_name, int32_t start_position, int32_t end_position,
2654  int32_t function_literal_id,
2655  Handle<PreParsedScopeData> pre_parsed_scope_data) {
2656  Handle<UncompiledDataWithPreParsedScope> result(
2657  UncompiledDataWithPreParsedScope::cast(
2658  New(uncompiled_data_with_pre_parsed_scope_map(), TENURED)),
2659  isolate());
2660  result->set_inferred_name(*inferred_name);
2661  result->set_start_position(start_position);
2662  result->set_end_position(end_position);
2663  result->set_function_literal_id(function_literal_id);
2664  result->set_pre_parsed_scope_data(*pre_parsed_scope_data);
2665 
2666  result->clear_padding();
2667  return result;
2668 }
2669 
2670 Handle<JSObject> Factory::NewExternal(void* value) {
2671  Handle<Foreign> foreign = NewForeign(reinterpret_cast<Address>(value));
2672  Handle<JSObject> external = NewJSObjectFromMap(external_map());
2673  external->SetEmbedderField(0, *foreign);
2674  return external;
2675 }
2676 
2677 Handle<CodeDataContainer> Factory::NewCodeDataContainer(int flags) {
2678  Handle<CodeDataContainer> data_container(
2679  CodeDataContainer::cast(New(code_data_container_map(), TENURED)),
2680  isolate());
2681  data_container->set_next_code_link(*undefined_value(), SKIP_WRITE_BARRIER);
2682  data_container->set_kind_specific_flags(flags);
2683  data_container->clear_padding();
2684  return data_container;
2685 }
2686 
2687 MaybeHandle<Code> Factory::TryNewCode(
2688  const CodeDesc& desc, Code::Kind kind, Handle<Object> self_ref,
2689  int32_t builtin_index, MaybeHandle<ByteArray> maybe_source_position_table,
2690  MaybeHandle<DeoptimizationData> maybe_deopt_data, Movability movability,
2691  uint32_t stub_key, bool is_turbofanned, int stack_slots,
2692  int safepoint_table_offset, int handler_table_offset) {
2693  // Allocate objects needed for code initialization.
2694  Handle<ByteArray> reloc_info = NewByteArray(
2695  desc.reloc_size,
2696  Builtins::IsBuiltinId(builtin_index) ? TENURED_READ_ONLY : TENURED);
2697  Handle<CodeDataContainer> data_container = NewCodeDataContainer(0);
2698  Handle<ByteArray> source_position_table =
2699  maybe_source_position_table.is_null()
2700  ? empty_byte_array()
2701  : maybe_source_position_table.ToHandleChecked();
2702  Handle<DeoptimizationData> deopt_data =
2703  maybe_deopt_data.is_null() ? DeoptimizationData::Empty(isolate())
2704  : maybe_deopt_data.ToHandleChecked();
2705  Handle<Code> code;
2706  {
2707  int object_size = ComputeCodeObjectSize(desc);
2708 
2709  Heap* heap = isolate()->heap();
2710  CodePageCollectionMemoryModificationScope code_allocation(heap);
2711  HeapObject* result =
2712  heap->AllocateRawWithLightRetry(object_size, CODE_SPACE);
2713 
2714  // Return an empty handle if we cannot allocate the code object.
2715  if (!result) return MaybeHandle<Code>();
2716 
2717  if (movability == kImmovable) {
2718  result = heap->EnsureImmovableCode(result, object_size);
2719  }
2720 
2721  // The code object has not been fully initialized yet. We rely on the
2722  // fact that no allocation will happen from this point on.
2723  DisallowHeapAllocation no_gc;
2724 
2725  result->set_map_after_allocation(*code_map(), SKIP_WRITE_BARRIER);
2726  code = handle(Code::cast(result), isolate());
2727 
2728  InitializeCode(heap, code, object_size, desc, kind, self_ref, builtin_index,
2729  source_position_table, deopt_data, reloc_info,
2730  data_container, stub_key, is_turbofanned, stack_slots,
2731  safepoint_table_offset, handler_table_offset);
2732 
2733  // Flush the instruction cache before changing the permissions.
2734  // Note: we do this before setting permissions to ReadExecute because on
2735  // some older ARM kernels there is a bug that causes cache flush
2736  // instructions to trigger an access error on non-writable memory.
2737  // See https://bugs.chromium.org/p/v8/issues/detail?id=8157
2738  code->FlushICache();
2739  }
2740 
2741  return code;
2742 }
2743 
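// --- Editorial annotation (not part of factory.cc) --------------------------
// TryNewCode above and NewCode below are near-identical; they differ only in
// allocation strategy. TryNewCode uses AllocateRawWithLightRetry and returns
// an empty MaybeHandle<Code> when allocation fails, while NewCode uses
// AllocateRawWithRetryOrFail and therefore either returns a Code object or
// aborts the process on OOM.
// --- End editorial annotation ------------------------------------------------
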
2744 Handle<Code> Factory::NewCode(
2745  const CodeDesc& desc, Code::Kind kind, Handle<Object> self_ref,
2746  int32_t builtin_index, MaybeHandle<ByteArray> maybe_source_position_table,
2747  MaybeHandle<DeoptimizationData> maybe_deopt_data, Movability movability,
2748  uint32_t stub_key, bool is_turbofanned, int stack_slots,
2749  int safepoint_table_offset, int handler_table_offset) {
2750  // Allocate objects needed for code initialization.
2751  Handle<ByteArray> reloc_info = NewByteArray(
2752  desc.reloc_size,
2753  Builtins::IsBuiltinId(builtin_index) ? TENURED_READ_ONLY : TENURED);
2754  Handle<CodeDataContainer> data_container = NewCodeDataContainer(0);
2755  Handle<ByteArray> source_position_table =
2756  maybe_source_position_table.is_null()
2757  ? empty_byte_array()
2758  : maybe_source_position_table.ToHandleChecked();
2759  Handle<DeoptimizationData> deopt_data =
2760  maybe_deopt_data.is_null() ? DeoptimizationData::Empty(isolate())
2761  : maybe_deopt_data.ToHandleChecked();
2762 
2763  Handle<Code> code;
2764  {
2765  int object_size = ComputeCodeObjectSize(desc);
2766 
2767  Heap* heap = isolate()->heap();
2768  CodePageCollectionMemoryModificationScope code_allocation(heap);
2769  HeapObject* result =
2770  heap->AllocateRawWithRetryOrFail(object_size, CODE_SPACE);
2771  if (movability == kImmovable) {
2772  result = heap->EnsureImmovableCode(result, object_size);
2773  }
2774 
2775  // The code object has not been fully initialized yet. We rely on the
2776  // fact that no allocation will happen from this point on.
2777  DisallowHeapAllocation no_gc;
2778 
2779  result->set_map_after_allocation(*code_map(), SKIP_WRITE_BARRIER);
2780  code = handle(Code::cast(result), isolate());
2781 
2782  InitializeCode(heap, code, object_size, desc, kind, self_ref, builtin_index,
2783  source_position_table, deopt_data, reloc_info,
2784  data_container, stub_key, is_turbofanned, stack_slots,
2785  safepoint_table_offset, handler_table_offset);
2786 
2787  // Flush the instruction cache before changing the permissions.
2788  // Note: we do this before setting permissions to ReadExecute because on
2789  // some older ARM kernels a bug causes cache flush instructions to trigger
2790  // an access error on non-writable memory.
2791  // See https://bugs.chromium.org/p/v8/issues/detail?id=8157
2792  code->FlushICache();
2793  }
2794 
2795  return code;
2796 }
2797 
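// Creates an on-heap trampoline Code object that dispatches to the given
// off-heap (embedded blob) entry point, copying flags such as the kind,
// builtin index, safepoint/handler table offsets and kind-specific flags
// from |code|.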
2798 Handle<Code> Factory::NewOffHeapTrampolineFor(Handle<Code> code,
2799  Address off_heap_entry) {
2800  CHECK_NOT_NULL(isolate()->embedded_blob());
2801  CHECK_NE(0, isolate()->embedded_blob_size());
2802  CHECK(Builtins::IsIsolateIndependentBuiltin(*code));
2803 
2804  Handle<Code> result =
2805  Builtins::GenerateOffHeapTrampolineFor(isolate(), off_heap_entry);
2806 
2807  // The trampoline code object must inherit specific flags from the original
2808  // builtin (e.g. the safepoint-table offset). We set them manually here.
2809 
2810  {
2811  MemoryChunk* chunk = MemoryChunk::FromAddress(result->ptr());
2812  CodePageMemoryModificationScope code_allocation(chunk);
2813 
2814  const bool set_is_off_heap_trampoline = true;
2815  const int stack_slots =
2816  code->has_safepoint_info() ? code->stack_slots() : 0;
2817  result->initialize_flags(code->kind(), code->has_unwinding_info(),
2818  code->is_turbofanned(), stack_slots,
2819  set_is_off_heap_trampoline);
2820  result->set_builtin_index(code->builtin_index());
2821  result->set_handler_table_offset(code->handler_table_offset());
2822  result->code_data_container()->set_kind_specific_flags(
2823  code->code_data_container()->kind_specific_flags());
2824  result->set_constant_pool_offset(code->constant_pool_offset());
2825  if (code->has_safepoint_info()) {
2826  result->set_safepoint_table_offset(code->safepoint_table_offset());
2827  }
2828 
2829  // Replace the newly generated trampoline's RelocInfo ByteArray with the
2830  // canonical one stored in the roots to avoid duplicating it for every
2831  // single builtin.
2832  ByteArray canonical_reloc_info =
2833  ReadOnlyRoots(isolate()).off_heap_trampoline_relocation_info();
2834 #ifdef DEBUG
2835  // Verify that the contents are the same.
2836  ByteArray reloc_info = result->relocation_info();
2837  DCHECK_EQ(reloc_info->length(), canonical_reloc_info->length());
2838  for (int i = 0; i < reloc_info->length(); ++i) {
2839  DCHECK_EQ(reloc_info->get(i), canonical_reloc_info->get(i));
2840  }
2841 #endif
2842  result->set_relocation_info(canonical_reloc_info);
2843  }
2844 
2845  return result;
2846 }
2847 
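// Makes a byte-wise copy of |code| in CODE_SPACE with its own (non-shared)
// CodeDataContainer, relocates the copy to its new address and performs the
// incremental-marking and code write-barrier bookkeeping.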
2848 Handle<Code> Factory::CopyCode(Handle<Code> code) {
2849  Handle<CodeDataContainer> data_container =
2850  NewCodeDataContainer(code->code_data_container()->kind_specific_flags());
2851 
2852  Heap* heap = isolate()->heap();
2853  Handle<Code> new_code;
2854  {
2855  int obj_size = code->Size();
2856  CodePageCollectionMemoryModificationScope code_allocation(heap);
2857  HeapObject* result = heap->AllocateRawWithRetryOrFail(obj_size, CODE_SPACE);
2858 
2859  // Copy code object.
2860  Address old_addr = code->address();
2861  Address new_addr = result->address();
2862  Heap::CopyBlock(new_addr, old_addr, obj_size);
2863  new_code = handle(Code::cast(result), isolate());
2864 
2865  // Set the {CodeDataContainer}, it cannot be shared.
2866  new_code->set_code_data_container(*data_container);
2867 
2868  new_code->Relocate(new_addr - old_addr);
2869  // We have to iterate over the object and process its pointers when black
2870  // allocation is on.
2871  heap->incremental_marking()->ProcessBlackAllocatedObject(*new_code);
2872  // Record all references to embedded objects in the new code object.
2873  WriteBarrierForCode(*new_code);
2874  }
2875 
2876 #ifdef VERIFY_HEAP
2877  if (FLAG_verify_heap) new_code->ObjectVerify(isolate());
2878 #endif
2879  DCHECK(IsAligned(new_code->address(), kCodeAlignment));
2880  DCHECK_IMPLIES(
2881  !heap->memory_allocator()->code_range().is_empty(),
2882  heap->memory_allocator()->code_range().contains(new_code->address()));
2883  return new_code;
2884 }
2885 
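// Allocates a tenured BytecodeArray of the same length and copies all metadata
// (frame size, parameter count, constant pool, handler table, source positions,
// interrupt budget, OSR nesting level, age) as well as the bytecodes themselves.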
2886 Handle<BytecodeArray> Factory::CopyBytecodeArray(
2887  Handle<BytecodeArray> bytecode_array) {
2888  int size = BytecodeArray::SizeFor(bytecode_array->length());
2889  HeapObject* result =
2890  AllocateRawWithImmortalMap(size, TENURED, *bytecode_array_map());
2891 
2892  Handle<BytecodeArray> copy(BytecodeArray::cast(result), isolate());
2893  copy->set_length(bytecode_array->length());
2894  copy->set_frame_size(bytecode_array->frame_size());
2895  copy->set_parameter_count(bytecode_array->parameter_count());
2896  copy->set_incoming_new_target_or_generator_register(
2897  bytecode_array->incoming_new_target_or_generator_register());
2898  copy->set_constant_pool(bytecode_array->constant_pool());
2899  copy->set_handler_table(bytecode_array->handler_table());
2900  copy->set_source_position_table(bytecode_array->source_position_table());
2901  copy->set_interrupt_budget(bytecode_array->interrupt_budget());
2902  copy->set_osr_loop_nesting_level(bytecode_array->osr_loop_nesting_level());
2903  copy->set_bytecode_age(bytecode_array->bytecode_age());
2904  bytecode_array->CopyBytecodesTo(*copy);
2905  return copy;
2906 }
2907 
2908 Handle<JSObject> Factory::NewJSObject(Handle<JSFunction> constructor,
2909  PretenureFlag pretenure) {
2910  JSFunction::EnsureHasInitialMap(constructor);
2911  Handle<Map> map(constructor->initial_map(), isolate());
2912  return NewJSObjectFromMap(map, pretenure);
2913 }
2914 
2915 Handle<JSObject> Factory::NewJSObjectWithNullProto(PretenureFlag pretenure) {
2916  Handle<JSObject> result =
2917  NewJSObject(isolate()->object_function(), pretenure);
2918  Handle<Map> new_map = Map::Copy(
2919  isolate(), Handle<Map>(result->map(), isolate()), "ObjectWithNullProto");
2920  Map::SetPrototype(isolate(), new_map, null_value());
2921  JSObject::MigrateToMap(result, new_map);
2922  return result;
2923 }
2924 
2925 Handle<JSGlobalObject> Factory::NewJSGlobalObject(
2926  Handle<JSFunction> constructor) {
2927  DCHECK(constructor->has_initial_map());
2928  Handle<Map> map(constructor->initial_map(), isolate());
2929  DCHECK(map->is_dictionary_map());
2930 
2931  // Make sure no field properties are described in the initial map.
2932  // This guarantees us that normalizing the properties does not
2933  // require us to change property values to PropertyCells.
2934  DCHECK_EQ(map->NextFreePropertyIndex(), 0);
2935 
2936  // Make sure we don't have a ton of pre-allocated slots in the
2937  // global objects. They will be unused once we normalize the object.
2938  DCHECK_EQ(map->UnusedPropertyFields(), 0);
2939  DCHECK_EQ(map->GetInObjectProperties(), 0);
2940 
2941  // Initial size of the backing store to avoid resize of the storage during
2942  // bootstrapping. The size differs between the JS global object and the
2943  // builtins object.
2944  int initial_size = 64;
2945 
2946  // Allocate a dictionary object for backing storage.
2947  int at_least_space_for = map->NumberOfOwnDescriptors() * 2 + initial_size;
2948  Handle<GlobalDictionary> dictionary =
2949  GlobalDictionary::New(isolate(), at_least_space_for);
2950 
2951  // The global object might be created from an object template with accessors.
2952  // Fill these accessors into the dictionary.
2953  Handle<DescriptorArray> descs(map->instance_descriptors(), isolate());
2954  for (int i = 0; i < map->NumberOfOwnDescriptors(); i++) {
2955  PropertyDetails details = descs->GetDetails(i);
2956  // Only accessors are expected.
2957  DCHECK_EQ(kAccessor, details.kind());
2958  PropertyDetails d(kAccessor, details.attributes(),
2959  PropertyCellType::kMutable);
2960  Handle<Name> name(descs->GetKey(i), isolate());
2961  Handle<PropertyCell> cell = NewPropertyCell(name);
2962  cell->set_value(descs->GetStrongValue(i));
2963  // |dictionary| already contains enough space for all properties.
2964  USE(GlobalDictionary::Add(isolate(), dictionary, name, cell, d));
2965  }
2966 
2967  // Allocate the global object and initialize it with the backing store.
2968  Handle<JSGlobalObject> global(JSGlobalObject::cast(New(map, TENURED)),
2969  isolate());
2970  InitializeJSObjectFromMap(global, dictionary, map);
2971 
2972  // Create a new map for the global object.
2973  Handle<Map> new_map = Map::CopyDropDescriptors(isolate(), map);
2974  new_map->set_may_have_interesting_symbols(true);
2975  new_map->set_is_dictionary_map(true);
2976  LOG(isolate(), MapDetails(*new_map));
2977 
2978  // Set up the global object as a normalized object.
2979  global->set_global_dictionary(*dictionary);
2980  global->synchronized_set_map(*new_map);
2981 
2982  // Make sure result is a global object with properties in dictionary.
2983  DCHECK(global->IsJSGlobalObject() && !global->HasFastProperties());
2984  return global;
2985 }
2986 
2987 void Factory::InitializeJSObjectFromMap(Handle<JSObject> obj,
2988  Handle<Object> properties,
2989  Handle<Map> map) {
2990  obj->set_raw_properties_or_hash(*properties);
2991  obj->initialize_elements();
2992  // TODO(1240798): Initialize the object's body using valid initial values
2993  // according to the object's initial map. For example, if the map's
2994  // instance type is JS_ARRAY_TYPE, the length field should be initialized
2995  // to a number (e.g. Smi::kZero) and the elements initialized to a
2996  // fixed array (e.g. Heap::empty_fixed_array()). Currently, the object
2997  // verification code has to cope with (temporarily) invalid objects. See,
2998  // for example, JSArray::JSArrayVerify.
2999  InitializeJSObjectBody(obj, map, JSObject::kHeaderSize);
3000 }
3001 
3002 void Factory::InitializeJSObjectBody(Handle<JSObject> obj, Handle<Map> map,
3003  int start_offset) {
3004  if (start_offset == map->instance_size()) return;
3005  DCHECK_LT(start_offset, map->instance_size());
3006 
3007  // We cannot always fill with one_pointer_filler_map because objects
3008  // created from API functions expect their embedder fields to be initialized
3009  // with undefined_value.
3010  // Pre-allocated fields need to be initialized with undefined_value as well
3011  // so that object accesses before the constructor completes (e.g. in the
3012  // debugger) will not cause a crash.
3013 
3014  // In case of Array subclassing the |map| could already be transitioned
3015  // to different elements kind from the initial map on which we track slack.
3016  bool in_progress = map->IsInobjectSlackTrackingInProgress();
3017  Object* filler;
3018  if (in_progress) {
3019  filler = *one_pointer_filler_map();
3020  } else {
3021  filler = *undefined_value();
3022  }
3023  obj->InitializeBody(*map, start_offset, *undefined_value(), filler);
3024  if (in_progress) {
3025  map->FindRootMap(isolate())->InobjectSlackTrackingStep(isolate());
3026  }
3027 }
3028 
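// Allocates a JSObject for an arbitrary |map|, optionally with an
// AllocationSite memento, using empty_fixed_array() as the initial properties
// backing store. JSFunctions and global objects must go through their
// dedicated allocation paths instead (see the DCHECKs below).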
3029 Handle<JSObject> Factory::NewJSObjectFromMap(
3030  Handle<Map> map, PretenureFlag pretenure,
3031  Handle<AllocationSite> allocation_site) {
3032  // JSFunctions should be allocated using AllocateFunction to be
3033  // properly initialized.
3034  DCHECK(map->instance_type() != JS_FUNCTION_TYPE);
3035 
3036  // Both types of global objects should be allocated using
3037  // AllocateGlobalObject to be properly initialized.
3038  DCHECK(map->instance_type() != JS_GLOBAL_OBJECT_TYPE);
3039 
3040  HeapObject* obj =
3041  AllocateRawWithAllocationSite(map, pretenure, allocation_site);
3042  Handle<JSObject> js_obj(JSObject::cast(obj), isolate());
3043 
3044  InitializeJSObjectFromMap(js_obj, empty_fixed_array(), map);
3045 
3046  DCHECK(js_obj->HasFastElements() || js_obj->HasFixedTypedArrayElements() ||
3047  js_obj->HasFastStringWrapperElements() ||
3048  js_obj->HasFastArgumentsElements());
3049  return js_obj;
3050 }
3051 
3052 Handle<JSObject> Factory::NewSlowJSObjectFromMap(Handle<Map> map, int capacity,
3053  PretenureFlag pretenure) {
3054  DCHECK(map->is_dictionary_map());
3055  Handle<NameDictionary> object_properties =
3056  NameDictionary::New(isolate(), capacity);
3057  Handle<JSObject> js_object = NewJSObjectFromMap(map, pretenure);
3058  js_object->set_raw_properties_or_hash(*object_properties);
3059  return js_object;
3060 }
3061 
3062 Handle<JSObject> Factory::NewSlowJSObjectWithPropertiesAndElements(
3063  Handle<Object> prototype, Handle<NameDictionary> properties,
3064  Handle<FixedArrayBase> elements, PretenureFlag pretenure) {
3065  Handle<Map> object_map = isolate()->slow_object_with_object_prototype_map();
3066  if (object_map->prototype() != *prototype) {
3067  object_map = Map::TransitionToPrototype(isolate(), object_map, prototype);
3068  }
3069  DCHECK(object_map->is_dictionary_map());
3070  Handle<JSObject> object = NewJSObjectFromMap(object_map, pretenure);
3071  object->set_raw_properties_or_hash(*properties);
3072  if (*elements != ReadOnlyRoots(isolate()).empty_fixed_array()) {
3073  DCHECK(elements->IsNumberDictionary());
3074  object_map =
3075  JSObject::GetElementsTransitionMap(object, DICTIONARY_ELEMENTS);
3076  JSObject::MigrateToMap(object, object_map);
3077  object->set_elements(*elements);
3078  }
3079  return object;
3080 }
3081 
3082 Handle<JSArray> Factory::NewJSArray(ElementsKind elements_kind,
3083  PretenureFlag pretenure) {
3084  NativeContext native_context = isolate()->raw_native_context();
3085  Map map = native_context->GetInitialJSArrayMap(elements_kind);
3086  if (map.is_null()) {
3087  JSFunction* array_function = native_context->array_function();
3088  map = array_function->initial_map();
3089  }
3090  return Handle<JSArray>::cast(
3091  NewJSObjectFromMap(handle(map, isolate()), pretenure));
3092 }
3093 
3094 Handle<JSArray> Factory::NewJSArray(ElementsKind elements_kind, int length,
3095  int capacity,
3096  ArrayStorageAllocationMode mode,
3097  PretenureFlag pretenure) {
3098  Handle<JSArray> array = NewJSArray(elements_kind, pretenure);
3099  NewJSArrayStorage(array, length, capacity, mode);
3100  return array;
3101 }
3102 
3103 Handle<JSArray> Factory::NewJSArrayWithElements(Handle<FixedArrayBase> elements,
3104  ElementsKind elements_kind,
3105  int length,
3106  PretenureFlag pretenure) {
3107  DCHECK(length <= elements->length());
3108  Handle<JSArray> array = NewJSArray(elements_kind, pretenure);
3109 
3110  array->set_elements(*elements);
3111  array->set_length(Smi::FromInt(length));
3112  JSObject::ValidateElements(*array);
3113  return array;
3114 }
3115 
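// Installs a fresh elements backing store of |capacity| entries on |array| and
// sets its length to |length|. Double kinds get a FixedDoubleArray, Smi/object
// kinds a FixedArray (holes are written only for
// INITIALIZE_ARRAY_ELEMENTS_WITH_HOLE); a zero capacity installs the canonical
// empty fixed array.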
3116 void Factory::NewJSArrayStorage(Handle<JSArray> array, int length, int capacity,
3117  ArrayStorageAllocationMode mode) {
3118  DCHECK(capacity >= length);
3119 
3120  if (capacity == 0) {
3121  array->set_length(Smi::kZero);
3122  array->set_elements(*empty_fixed_array());
3123  return;
3124  }
3125 
3126  HandleScope inner_scope(isolate());
3127  Handle<FixedArrayBase> elms;
3128  ElementsKind elements_kind = array->GetElementsKind();
3129  if (IsDoubleElementsKind(elements_kind)) {
3130  if (mode == DONT_INITIALIZE_ARRAY_ELEMENTS) {
3131  elms = NewFixedDoubleArray(capacity);
3132  } else {
3133  DCHECK(mode == INITIALIZE_ARRAY_ELEMENTS_WITH_HOLE);
3134  elms = NewFixedDoubleArrayWithHoles(capacity);
3135  }
3136  } else {
3137  DCHECK(IsSmiOrObjectElementsKind(elements_kind));
3138  if (mode == DONT_INITIALIZE_ARRAY_ELEMENTS) {
3139  elms = NewUninitializedFixedArray(capacity);
3140  } else {
3141  DCHECK(mode == INITIALIZE_ARRAY_ELEMENTS_WITH_HOLE);
3142  elms = NewFixedArrayWithHoles(capacity);
3143  }
3144  }
3145 
3146  array->set_elements(*elms);
3147  array->set_length(Smi::FromInt(length));
3148 }
3149 
3150 Handle<JSWeakMap> Factory::NewJSWeakMap() {
3151  NativeContext native_context = isolate()->raw_native_context();
3152  Handle<Map> map(native_context->js_weak_map_fun()->initial_map(), isolate());
3153  Handle<JSWeakMap> weakmap(JSWeakMap::cast(*NewJSObjectFromMap(map)),
3154  isolate());
3155  {
3156  // Do not leak handles for the hash table; that would make its entries strong.
3157  HandleScope scope(isolate());
3158  JSWeakCollection::Initialize(weakmap, isolate());
3159  }
3160  return weakmap;
3161 }
3162 
3163 Handle<JSModuleNamespace> Factory::NewJSModuleNamespace() {
3164  Handle<Map> map = isolate()->js_module_namespace_map();
3165  Handle<JSModuleNamespace> module_namespace(
3166  Handle<JSModuleNamespace>::cast(NewJSObjectFromMap(map)));
3167  FieldIndex index = FieldIndex::ForDescriptor(
3168  *map, JSModuleNamespace::kToStringTagFieldIndex);
3169  module_namespace->FastPropertyAtPut(index,
3170  ReadOnlyRoots(isolate()).Module_string());
3171  return module_namespace;
3172 }
3173 
3174 Handle<JSGeneratorObject> Factory::NewJSGeneratorObject(
3175  Handle<JSFunction> function) {
3176  DCHECK(IsResumableFunction(function->shared()->kind()));
3177  JSFunction::EnsureHasInitialMap(function);
3178  Handle<Map> map(function->initial_map(), isolate());
3179 
3180  DCHECK(map->instance_type() == JS_GENERATOR_OBJECT_TYPE ||
3181  map->instance_type() == JS_ASYNC_GENERATOR_OBJECT_TYPE);
3182 
3183  return Handle<JSGeneratorObject>::cast(NewJSObjectFromMap(map));
3184 }
3185 
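// Allocates a Module struct for the given SharedFunctionInfo, pre-sizing the
// exports hash table and the regular export/import arrays from the ModuleInfo
// and leaving the module in the kUninstantiated state.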
3186 Handle<Module> Factory::NewModule(Handle<SharedFunctionInfo> code) {
3187  Handle<ModuleInfo> module_info(code->scope_info()->ModuleDescriptorInfo(),
3188  isolate());
3189  Handle<ObjectHashTable> exports =
3190  ObjectHashTable::New(isolate(), module_info->RegularExportCount());
3191  Handle<FixedArray> regular_exports =
3192  NewFixedArray(module_info->RegularExportCount());
3193  Handle<FixedArray> regular_imports =
3194  NewFixedArray(module_info->regular_imports()->length());
3195  int requested_modules_length = module_info->module_requests()->length();
3196  Handle<FixedArray> requested_modules =
3197  requested_modules_length > 0 ? NewFixedArray(requested_modules_length)
3198  : empty_fixed_array();
3199 
3200  ReadOnlyRoots roots(isolate());
3201  Handle<Module> module = Handle<Module>::cast(NewStruct(MODULE_TYPE, TENURED));
3202  module->set_code(*code);
3203  module->set_exports(*exports);
3204  module->set_regular_exports(*regular_exports);
3205  module->set_regular_imports(*regular_imports);
3206  module->set_hash(isolate()->GenerateIdentityHash(Smi::kMaxValue));
3207  module->set_module_namespace(roots.undefined_value());
3208  module->set_requested_modules(*requested_modules);
3209  module->set_script(Script::cast(code->script()));
3210  module->set_status(Module::kUninstantiated);
3211  module->set_exception(roots.the_hole_value());
3212  module->set_import_meta(roots.the_hole_value());
3213  module->set_dfs_index(-1);
3214  module->set_dfs_ancestor_index(-1);
3215  return module;
3216 }
3217 
3218 Handle<JSArrayBuffer> Factory::NewJSArrayBuffer(SharedFlag shared,
3219  PretenureFlag pretenure) {
3220  Handle<JSFunction> array_buffer_fun(
3221  shared == SharedFlag::kShared
3222  ? isolate()->native_context()->shared_array_buffer_fun()
3223  : isolate()->native_context()->array_buffer_fun(),
3224  isolate());
3225  Handle<Map> map(array_buffer_fun->initial_map(), isolate());
3226  return Handle<JSArrayBuffer>::cast(NewJSObjectFromMap(map, pretenure));
3227 }
3228 
3229 Handle<JSIteratorResult> Factory::NewJSIteratorResult(Handle<Object> value,
3230  bool done) {
3231  Handle<Map> map(isolate()->native_context()->iterator_result_map(),
3232  isolate());
3233  Handle<JSIteratorResult> js_iter_result =
3234  Handle<JSIteratorResult>::cast(NewJSObjectFromMap(map));
3235  js_iter_result->set_value(*value);
3236  js_iter_result->set_done(*ToBoolean(done));
3237  return js_iter_result;
3238 }
3239 
3240 Handle<JSAsyncFromSyncIterator> Factory::NewJSAsyncFromSyncIterator(
3241  Handle<JSReceiver> sync_iterator, Handle<Object> next) {
3242  Handle<Map> map(isolate()->native_context()->async_from_sync_iterator_map(),
3243  isolate());
3244  Handle<JSAsyncFromSyncIterator> iterator =
3245  Handle<JSAsyncFromSyncIterator>::cast(NewJSObjectFromMap(map));
3246 
3247  iterator->set_sync_iterator(*sync_iterator);
3248  iterator->set_next(*next);
3249  return iterator;
3250 }
3251 
3252 Handle<JSMap> Factory::NewJSMap() {
3253  Handle<Map> map(isolate()->native_context()->js_map_map(), isolate());
3254  Handle<JSMap> js_map = Handle<JSMap>::cast(NewJSObjectFromMap(map));
3255  JSMap::Initialize(js_map, isolate());
3256  return js_map;
3257 }
3258 
3259 Handle<JSSet> Factory::NewJSSet() {
3260  Handle<Map> map(isolate()->native_context()->js_set_map(), isolate());
3261  Handle<JSSet> js_set = Handle<JSSet>::cast(NewJSObjectFromMap(map));
3262  JSSet::Initialize(js_set, isolate());
3263  return js_set;
3264 }
3265 
3266 void Factory::TypeAndSizeForElementsKind(ElementsKind kind,
3267  ExternalArrayType* array_type,
3268  size_t* element_size) {
3269  switch (kind) {
3270 #define TYPED_ARRAY_CASE(Type, type, TYPE, ctype) \
3271  case TYPE##_ELEMENTS: \
3272  *array_type = kExternal##Type##Array; \
3273  *element_size = sizeof(ctype); \
3274  break;
3275  TYPED_ARRAYS(TYPED_ARRAY_CASE)
3276 #undef TYPED_ARRAY_CASE
3277 
3278  default:
3279  UNREACHABLE();
3280  }
3281 }
3282 
3283 namespace {
3284 
3285 static void ForFixedTypedArray(ExternalArrayType array_type,
3286  size_t* element_size,
3287  ElementsKind* element_kind) {
3288  switch (array_type) {
3289 #define TYPED_ARRAY_CASE(Type, type, TYPE, ctype) \
3290  case kExternal##Type##Array: \
3291  *element_size = sizeof(ctype); \
3292  *element_kind = TYPE##_ELEMENTS; \
3293  return;
3294 
3295  TYPED_ARRAYS(TYPED_ARRAY_CASE)
3296 #undef TYPED_ARRAY_CASE
3297  }
3298  UNREACHABLE();
3299 }
3300 
3301 JSFunction* GetTypedArrayFun(ExternalArrayType type, Isolate* isolate) {
3302  NativeContext native_context = isolate->context()->native_context();
3303  switch (type) {
3304 #define TYPED_ARRAY_FUN(Type, type, TYPE, ctype) \
3305  case kExternal##Type##Array: \
3306  return native_context->type##_array_fun();
3307 
3308  TYPED_ARRAYS(TYPED_ARRAY_FUN)
3309 #undef TYPED_ARRAY_FUN
3310  }
3311  UNREACHABLE();
3312 }
3313 
3314 JSFunction* GetTypedArrayFun(ElementsKind elements_kind, Isolate* isolate) {
3315  NativeContext native_context = isolate->context()->native_context();
3316  switch (elements_kind) {
3317 #define TYPED_ARRAY_FUN(Type, type, TYPE, ctype) \
3318  case TYPE##_ELEMENTS: \
3319  return native_context->type##_array_fun();
3320 
3321  TYPED_ARRAYS(TYPED_ARRAY_FUN)
3322 #undef TYPED_ARRAY_FUN
3323 
3324  default:
3325  UNREACHABLE();
3326  }
3327 }
3328 
3329 void SetupArrayBufferView(i::Isolate* isolate,
3330  i::Handle<i::JSArrayBufferView> obj,
3331  i::Handle<i::JSArrayBuffer> buffer,
3332  size_t byte_offset, size_t byte_length) {
3333  DCHECK_LE(byte_offset + byte_length, buffer->byte_length());
3334  DCHECK_EQ(obj->GetEmbedderFieldCount(),
3335  v8::ArrayBufferView::kEmbedderFieldCount);
3336  for (int i = 0; i < v8::ArrayBufferView::kEmbedderFieldCount; i++) {
3337  obj->SetEmbedderField(i, Smi::kZero);
3338  }
3339  obj->set_buffer(*buffer);
3340  obj->set_byte_offset(byte_offset);
3341  obj->set_byte_length(byte_length);
3342 }
3343 
3344 } // namespace
3345 
3346 Handle<JSTypedArray> Factory::NewJSTypedArray(ExternalArrayType type,
3347  PretenureFlag pretenure) {
3348  Handle<JSFunction> typed_array_fun(GetTypedArrayFun(type, isolate()),
3349  isolate());
3350  Handle<Map> map(typed_array_fun->initial_map(), isolate());
3351  return Handle<JSTypedArray>::cast(NewJSObjectFromMap(map, pretenure));
3352 }
3353 
3354 Handle<JSTypedArray> Factory::NewJSTypedArray(ElementsKind elements_kind,
3355  PretenureFlag pretenure) {
3356  Handle<JSFunction> typed_array_fun(GetTypedArrayFun(elements_kind, isolate()),
3357  isolate());
3358  Handle<Map> map(typed_array_fun->initial_map(), isolate());
3359  return Handle<JSTypedArray>::cast(NewJSObjectFromMap(map, pretenure));
3360 }
3361 
3362 Handle<JSTypedArray> Factory::NewJSTypedArray(ExternalArrayType type,
3363  Handle<JSArrayBuffer> buffer,
3364  size_t byte_offset, size_t length,
3365  PretenureFlag pretenure) {
3366  Handle<JSTypedArray> obj = NewJSTypedArray(type, pretenure);
3367 
3368  size_t element_size;
3369  ElementsKind elements_kind;
3370  ForFixedTypedArray(type, &element_size, &elements_kind);
3371 
3372  CHECK_EQ(byte_offset % element_size, 0);
3373 
3374  CHECK(length <= (std::numeric_limits<size_t>::max() / element_size));
3375  // TODO(7881): Smi length check
3376  CHECK(length <= static_cast<size_t>(Smi::kMaxValue));
3377  size_t byte_length = length * element_size;
3378  SetupArrayBufferView(isolate(), obj, buffer, byte_offset, byte_length);
3379 
3380  Handle<Object> length_object = NewNumberFromSize(length, pretenure);
3381  obj->set_length(*length_object);
3382 
3383  Handle<FixedTypedArrayBase> elements = NewFixedTypedArrayWithExternalPointer(
3384  static_cast<int>(length), type,
3385  static_cast<uint8_t*>(buffer->backing_store()) + byte_offset, pretenure);
3386  Handle<Map> map = JSObject::GetElementsTransitionMap(obj, elements_kind);
3387  JSObject::SetMapAndElements(obj, map, elements);
3388  return obj;
3389 }
3390 
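// Creates a JSTypedArray backed by a newly set up JSArrayBuffer of
// |number_of_elements| * element_size bytes; the element count is checked
// against both the size_t and the Smi limits before allocating.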
3391 Handle<JSTypedArray> Factory::NewJSTypedArray(ElementsKind elements_kind,
3392  size_t number_of_elements,
3393  PretenureFlag pretenure) {
3394  Handle<JSTypedArray> obj = NewJSTypedArray(elements_kind, pretenure);
3395  DCHECK_EQ(obj->GetEmbedderFieldCount(),
3396  v8::ArrayBufferView::kEmbedderFieldCount);
3397  for (int i = 0; i < v8::ArrayBufferView::kEmbedderFieldCount; i++) {
3398  obj->SetEmbedderField(i, Smi::kZero);
3399  }
3400 
3401  size_t element_size;
3402  ExternalArrayType array_type;
3403  TypeAndSizeForElementsKind(elements_kind, &array_type, &element_size);
3404 
3405  CHECK(number_of_elements <=
3406  (std::numeric_limits<size_t>::max() / element_size));
3407  // TODO(7881): Smi length check
3408  CHECK(number_of_elements <= static_cast<size_t>(Smi::kMaxValue));
3409  size_t byte_length = number_of_elements * element_size;
3410 
3411  obj->set_byte_offset(0);
3412  obj->set_byte_length(byte_length);
3413  obj->set_length(Smi::FromIntptr(static_cast<intptr_t>(number_of_elements)));
3414 
3415  Handle<JSArrayBuffer> buffer =
3416  NewJSArrayBuffer(SharedFlag::kNotShared, pretenure);
3417  JSArrayBuffer::Setup(buffer, isolate(), true, nullptr, byte_length,
3418  SharedFlag::kNotShared);
3419  obj->set_buffer(*buffer);
3420  Handle<FixedTypedArrayBase> elements = NewFixedTypedArray(
3421  number_of_elements, byte_length, array_type, true, pretenure);
3422  obj->set_elements(*elements);
3423  return obj;
3424 }
3425 
3426 Handle<JSDataView> Factory::NewJSDataView(Handle<JSArrayBuffer> buffer,
3427  size_t byte_offset,
3428  size_t byte_length) {
3429  Handle<Map> map(isolate()->native_context()->data_view_fun()->initial_map(),
3430  isolate());
3431  Handle<JSDataView> obj = Handle<JSDataView>::cast(NewJSObjectFromMap(map));
3432  SetupArrayBufferView(isolate(), obj, buffer, byte_offset, byte_length);
3433  return obj;
3434 }
3435 
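// Creates a JSBoundFunction as used by Function.prototype.bind: the bound
// arguments are copied into a FixedArray, the constructor or non-constructor
// bound-function map is selected and transitioned to the target's prototype,
// and a RangeError is thrown when Code::kMaxArguments or more arguments are
// bound.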
3436 MaybeHandle<JSBoundFunction> Factory::NewJSBoundFunction(
3437  Handle<JSReceiver> target_function, Handle<Object> bound_this,
3438  Vector<Handle<Object>> bound_args) {
3439  DCHECK(target_function->IsCallable());
3440  STATIC_ASSERT(Code::kMaxArguments <= FixedArray::kMaxLength);
3441  if (bound_args.length() >= Code::kMaxArguments) {
3442  THROW_NEW_ERROR(isolate(),
3443  NewRangeError(MessageTemplate::kTooManyArguments),
3444  JSBoundFunction);
3445  }
3446 
3447  // Determine the prototype of the {target_function}.
3448  Handle<Object> prototype;
3449  ASSIGN_RETURN_ON_EXCEPTION(
3450  isolate(), prototype,
3451  JSReceiver::GetPrototype(isolate(), target_function), JSBoundFunction);
3452 
3453  SaveContext save(isolate());
3454  isolate()->set_context(*target_function->GetCreationContext());
3455 
3456  // Create the [[BoundArguments]] for the result.
3457  Handle<FixedArray> bound_arguments;
3458  if (bound_args.length() == 0) {
3459  bound_arguments = empty_fixed_array();
3460  } else {
3461  bound_arguments = NewFixedArray(bound_args.length());
3462  for (int i = 0; i < bound_args.length(); ++i) {
3463  bound_arguments->set(i, *bound_args[i]);
3464  }
3465  }
3466 
3467  // Setup the map for the JSBoundFunction instance.
3468  Handle<Map> map = target_function->IsConstructor()
3469  ? isolate()->bound_function_with_constructor_map()
3470  : isolate()->bound_function_without_constructor_map();
3471  if (map->prototype() != *prototype) {
3472  map = Map::TransitionToPrototype(isolate(), map, prototype);
3473  }
3474  DCHECK_EQ(target_function->IsConstructor(), map->is_constructor());
3475 
3476  // Setup the JSBoundFunction instance.
3477  Handle<JSBoundFunction> result =
3478  Handle<JSBoundFunction>::cast(NewJSObjectFromMap(map));
3479  result->set_bound_target_function(*target_function);
3480  result->set_bound_this(*bound_this);
3481  result->set_bound_arguments(*bound_arguments);
3482  return result;
3483 }
3484 
3485 // ES6 section 9.5.15 ProxyCreate (target, handler)
3486 Handle<JSProxy> Factory::NewJSProxy(Handle<JSReceiver> target,
3487  Handle<JSReceiver> handler) {
3488  // Allocate the proxy object.
3489  Handle<Map> map;
3490  if (target->IsCallable()) {
3491  if (target->IsConstructor()) {
3492  map = Handle<Map>(isolate()->proxy_constructor_map());
3493  } else {
3494  map = Handle<Map>(isolate()->proxy_callable_map());
3495  }
3496  } else {
3497  map = Handle<Map>(isolate()->proxy_map());
3498  }
3499  DCHECK(map->prototype()->IsNull(isolate()));
3500  Handle<JSProxy> result(JSProxy::cast(New(map, NOT_TENURED)), isolate());
3501  result->initialize_properties();
3502  result->set_target(*target);
3503  result->set_handler(*handler);
3504  return result;
3505 }
3506 
3507 Handle<JSGlobalProxy> Factory::NewUninitializedJSGlobalProxy(int size) {
3508  // Create an empty shell of a JSGlobalProxy that needs to be reinitialized
3509  // via ReinitializeJSGlobalProxy later.
3510  Handle<Map> map = NewMap(JS_GLOBAL_PROXY_TYPE, size);
3511  // Maintain invariant expected from any JSGlobalProxy.
3512  map->set_is_access_check_needed(true);
3513  map->set_may_have_interesting_symbols(true);
3514  LOG(isolate(), MapDetails(*map));
3515  return Handle<JSGlobalProxy>::cast(NewJSObjectFromMap(map, NOT_TENURED));
3516 }
3517 
3518 void Factory::ReinitializeJSGlobalProxy(Handle<JSGlobalProxy> object,
3519  Handle<JSFunction> constructor) {
3520  DCHECK(constructor->has_initial_map());
3521  Handle<Map> map(constructor->initial_map(), isolate());
3522  Handle<Map> old_map(object->map(), isolate());
3523 
3524  // The proxy's hash should be retained across reinitialization.
3525  Handle<Object> raw_properties_or_hash(object->raw_properties_or_hash(),
3526  isolate());
3527 
3528  if (old_map->is_prototype_map()) {
3529  map = Map::Copy(isolate(), map, "CopyAsPrototypeForJSGlobalProxy");
3530  map->set_is_prototype_map(true);
3531  }
3532  JSObject::NotifyMapChange(old_map, map, isolate());
3533  old_map->NotifyLeafMapLayoutChange(isolate());
3534 
3535  // Check that the already allocated object has the same size and type as
3536  // objects allocated using the constructor.
3537  DCHECK(map->instance_size() == old_map->instance_size());
3538  DCHECK(map->instance_type() == old_map->instance_type());
3539 
3540  // In order to keep heap in consistent state there must be no allocations
3541  // before object re-initialization is finished.
3542  DisallowHeapAllocation no_allocation;
3543 
3544  // Reset the map for the object.
3545  object->synchronized_set_map(*map);
3546 
3547  // Reinitialize the object from the constructor map.
3548  InitializeJSObjectFromMap(object, raw_properties_or_hash, map);
3549 }
3550 
3551 Handle<SharedFunctionInfo> Factory::NewSharedFunctionInfoForLiteral(
3552  FunctionLiteral* literal, Handle<Script> script, bool is_toplevel) {
3553  FunctionKind kind = literal->kind();
3554  Handle<SharedFunctionInfo> shared = NewSharedFunctionInfoForBuiltin(
3555  literal->name(), Builtins::kCompileLazy, kind);
3556  SharedFunctionInfo::InitFromFunctionLiteral(shared, literal, is_toplevel);
3557  SharedFunctionInfo::SetScript(shared, script, literal->function_literal_id(),
3558  false);
3559  return shared;
3560 }
3561 
3562 Handle<JSMessageObject> Factory::NewJSMessageObject(
3563  MessageTemplate message, Handle<Object> argument, int start_position,
3564  int end_position, Handle<Script> script, Handle<Object> stack_frames) {
3565  Handle<Map> map = message_object_map();
3566  Handle<JSMessageObject> message_obj(
3567  JSMessageObject::cast(New(map, NOT_TENURED)), isolate());
3568  message_obj->set_raw_properties_or_hash(*empty_fixed_array(),
3569  SKIP_WRITE_BARRIER);
3570  message_obj->initialize_elements();
3571  message_obj->set_elements(*empty_fixed_array(), SKIP_WRITE_BARRIER);
3572  message_obj->set_type(message);
3573  message_obj->set_argument(*argument);
3574  message_obj->set_start_position(start_position);
3575  message_obj->set_end_position(end_position);
3576  message_obj->set_script(*script);
3577  message_obj->set_stack_frames(*stack_frames);
3578  message_obj->set_error_level(v8::Isolate::kMessageError);
3579  return message_obj;
3580 }
3581 
3582 Handle<SharedFunctionInfo> Factory::NewSharedFunctionInfoForApiFunction(
3583  MaybeHandle<String> maybe_name,
3584  Handle<FunctionTemplateInfo> function_template_info, FunctionKind kind) {
3585  Handle<SharedFunctionInfo> shared = NewSharedFunctionInfo(
3586  maybe_name, function_template_info, Builtins::kNoBuiltinId, kind);
3587  return shared;
3588 }
3589 
3590 Handle<SharedFunctionInfo> Factory::NewSharedFunctionInfoForBuiltin(
3591  MaybeHandle<String> maybe_name, int builtin_index, FunctionKind kind) {
3592  // TODO(3770): Switch to MaybeHandle<Code>() after migration.
3593  Handle<SharedFunctionInfo> shared = NewSharedFunctionInfo(
3594  maybe_name, MaybeHandle<HeapObject>(), builtin_index, kind);
3595  return shared;
3596 }
3597 
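// Common SharedFunctionInfo constructor: flattens the name (if any), wires up
// either |maybe_function_data| or a builtin id (falling back to kIllegal),
// zeroes the integer fields and links the new SFI into the
// noscript_shared_function_infos weak list.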
3598 Handle<SharedFunctionInfo> Factory::NewSharedFunctionInfo(
3599  MaybeHandle<String> maybe_name, MaybeHandle<HeapObject> maybe_function_data,
3600  int maybe_builtin_index, FunctionKind kind) {
3601  // Function names are assumed to be flat elsewhere. Must flatten before
3602  // allocating SharedFunctionInfo to avoid GC seeing the uninitialized SFI.
3603  Handle<String> shared_name;
3604  bool has_shared_name = maybe_name.ToHandle(&shared_name);
3605  if (has_shared_name) {
3606  shared_name = String::Flatten(isolate(), shared_name, TENURED);
3607  }
3608 
3609  Handle<Map> map = shared_function_info_map();
3610  Handle<SharedFunctionInfo> share(SharedFunctionInfo::cast(New(map, TENURED)),
3611  isolate());
3612  {
3613  DisallowHeapAllocation no_allocation;
3614 
3615  // Set pointer fields.
3616  share->set_name_or_scope_info(
3617  has_shared_name ? Object::cast(*shared_name)
3618  : SharedFunctionInfo::kNoSharedNameSentinel);
3619  Handle<HeapObject> function_data;
3620  if (maybe_function_data.ToHandle(&function_data)) {
3621  // If we pass function_data then we shouldn't pass a builtin index, and
3622  // the function_data should not be code with a builtin.
3623  DCHECK(!Builtins::IsBuiltinId(maybe_builtin_index));
3624  DCHECK_IMPLIES(function_data->IsCode(),
3625  !Code::cast(*function_data)->is_builtin());
3626  share->set_function_data(*function_data);
3627  } else if (Builtins::IsBuiltinId(maybe_builtin_index)) {
3628  share->set_builtin_id(maybe_builtin_index);
3629  } else {
3630  share->set_builtin_id(Builtins::kIllegal);
3631  }
3632  // Generally functions won't have feedback, unless they have been created
3633  // from a FunctionLiteral. Those can just reset this field to keep the
3634  // SharedFunctionInfo in a consistent state.
3635  if (maybe_builtin_index == Builtins::kCompileLazy) {
3636  share->set_raw_outer_scope_info_or_feedback_metadata(*the_hole_value(),
3637  SKIP_WRITE_BARRIER);
3638  } else {
3639  share->set_raw_outer_scope_info_or_feedback_metadata(
3640  *empty_feedback_metadata(), SKIP_WRITE_BARRIER);
3641  }
3642  share->set_script_or_debug_info(*undefined_value(), SKIP_WRITE_BARRIER);
3643 #if V8_SFI_HAS_UNIQUE_ID
3644  share->set_unique_id(isolate()->GetNextUniqueSharedFunctionInfoId());
3645 #endif
3646 
3647  // Set integer fields (smi or int, depending on the architecture).
3648  share->set_length(0);
3649  share->set_internal_formal_parameter_count(0);
3650  share->set_expected_nof_properties(0);
3651  share->set_builtin_function_id(
3652  BuiltinFunctionId::kInvalidBuiltinFunctionId);
3653  share->set_raw_function_token_offset(0);
3654  // All flags default to false or 0.
3655  share->set_flags(0);
3656  share->CalculateConstructAsBuiltin();
3657  share->set_kind(kind);
3658 
3659  share->clear_padding();
3660  }
3661  // Link into the list.
3662  Handle<WeakArrayList> noscript_list = noscript_shared_function_infos();
3663  noscript_list = WeakArrayList::AddToEnd(isolate(), noscript_list,
3664  MaybeObjectHandle::Weak(share));
3665  isolate()->heap()->set_noscript_shared_function_infos(*noscript_list);
3666 
3667 #ifdef VERIFY_HEAP
3668  share->SharedFunctionInfoVerify(isolate());
3669 #endif
3670  return share;
3671 }
3672 
3673 namespace {
3674 inline int NumberToStringCacheHash(Handle<FixedArray> cache, Smi number) {
3675  int mask = (cache->length() >> 1) - 1;
3676  return number->value() & mask;
3677 }
3678 inline int NumberToStringCacheHash(Handle<FixedArray> cache, double number) {
3679  int mask = (cache->length() >> 1) - 1;
3680  int64_t bits = bit_cast<int64_t>(number);
3681  return (static_cast<int>(bits) ^ static_cast<int>(bits >> 32)) & mask;
3682 }
3683 } // namespace
3684 
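// Stores the |number| -> string mapping in the number-string cache at slots
// 2 * hash (key) and 2 * hash + 1 (value). On a collision the cache is first
// grown to its maximum size (skipping the store); once at full size, colliding
// entries are simply overwritten.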
3685 Handle<String> Factory::NumberToStringCacheSet(Handle<Object> number, int hash,
3686  const char* string,
3687  bool check_cache) {
3688  // We tenure the allocated string since it is referenced from the
3689  // number-string cache which lives in the old space.
3690  Handle<String> js_string =
3691  NewStringFromAsciiChecked(string, check_cache ? TENURED : NOT_TENURED);
3692  if (!check_cache) return js_string;
3693 
3694  if (!number_string_cache()->get(hash * 2)->IsUndefined(isolate())) {
3695  int full_size = isolate()->heap()->MaxNumberToStringCacheSize();
3696  if (number_string_cache()->length() != full_size) {
3697  Handle<FixedArray> new_cache = NewFixedArray(full_size, TENURED);
3698  isolate()->heap()->set_number_string_cache(*new_cache);
3699  return js_string;
3700  }
3701  }
3702  number_string_cache()->set(hash * 2, *number);
3703  number_string_cache()->set(hash * 2 + 1, *js_string);
3704  return js_string;
3705 }
3706 
3707 Handle<Object> Factory::NumberToStringCacheGet(Object* number, int hash) {
3708  DisallowHeapAllocation no_gc;
3709  Object* key = number_string_cache()->get(hash * 2);
3710  if (key == number || (key->IsHeapNumber() && number->IsHeapNumber() &&
3711  key->Number() == number->Number())) {
3712  return Handle<String>(
3713  String::cast(number_string_cache()->get(hash * 2 + 1)), isolate());
3714  }
3715  return undefined_value();
3716 }
3717 
3718 Handle<String> Factory::NumberToString(Handle<Object> number,
3719  bool check_cache) {
3720  if (number->IsSmi()) return NumberToString(Smi::cast(*number), check_cache);
3721 
3722  double double_value = Handle<HeapNumber>::cast(number)->value();
3723  // Try to canonicalize doubles.
3724  int smi_value;
3725  if (DoubleToSmiInteger(double_value, &smi_value)) {
3726  return NumberToString(Smi::FromInt(smi_value), check_cache);
3727  }
3728 
3729  int hash = 0;
3730  if (check_cache) {
3731  hash = NumberToStringCacheHash(number_string_cache(), double_value);
3732  Handle<Object> cached = NumberToStringCacheGet(*number, hash);
3733  if (!cached->IsUndefined(isolate())) return Handle<String>::cast(cached);
3734  }
3735 
3736  char arr[100];
3737  Vector<char> buffer(arr, arraysize(arr));
3738  const char* string = DoubleToCString(double_value, buffer);
3739 
3740  return NumberToStringCacheSet(number, hash, string, check_cache);
3741 }
3742 
3743 Handle<String> Factory::NumberToString(Smi number, bool check_cache) {
3744  int hash = 0;
3745  if (check_cache) {
3746  hash = NumberToStringCacheHash(number_string_cache(), number);
3747  Handle<Object> cached = NumberToStringCacheGet(number, hash);
3748  if (!cached->IsUndefined(isolate())) return Handle<String>::cast(cached);
3749  }
3750 
3751  char arr[100];
3752  Vector<char> buffer(arr, arraysize(arr));
3753  const char* string = IntToCString(number->value(), buffer);
3754 
3755  return NumberToStringCacheSet(handle(number, isolate()), hash, string,
3756  check_cache);
3757 }
3758 
3759 Handle<DebugInfo> Factory::NewDebugInfo(Handle<SharedFunctionInfo> shared) {
3760  DCHECK(!shared->HasDebugInfo());
3761  Heap* heap = isolate()->heap();
3762 
3763  Handle<DebugInfo> debug_info =
3764  Handle<DebugInfo>::cast(NewStruct(DEBUG_INFO_TYPE, TENURED));
3765  debug_info->set_flags(DebugInfo::kNone);
3766  debug_info->set_shared(*shared);
3767  debug_info->set_debugger_hints(0);
3768  DCHECK_EQ(DebugInfo::kNoDebuggingId, debug_info->debugging_id());
3769  DCHECK(!shared->HasDebugInfo());
3770  debug_info->set_script(shared->script_or_debug_info());
3771  debug_info->set_original_bytecode_array(
3772  ReadOnlyRoots(heap).undefined_value());
3773  debug_info->set_debug_bytecode_array(ReadOnlyRoots(heap).undefined_value());
3774  debug_info->set_break_points(ReadOnlyRoots(heap).empty_fixed_array());
3775 
3776  // Link debug info to function.
3777  shared->SetDebugInfo(*debug_info);
3778 
3779  return debug_info;
3780 }
3781 
3782 Handle<CoverageInfo> Factory::NewCoverageInfo(
3783  const ZoneVector<SourceRange>& slots) {
3784  const int slot_count = static_cast<int>(slots.size());
3785 
3786  const int length = CoverageInfo::FixedArrayLengthForSlotCount(slot_count);
3787  Handle<CoverageInfo> info =
3788  Handle<CoverageInfo>::cast(NewUninitializedFixedArray(length));
3789 
3790  for (int i = 0; i < slot_count; i++) {
3791  SourceRange range = slots[i];
3792  info->InitializeSlot(i, range.start, range.end);
3793  }
3794 
3795  return info;
3796 }
3797 
3798 Handle<BreakPointInfo> Factory::NewBreakPointInfo(int source_position) {
3799  Handle<BreakPointInfo> new_break_point_info =
3800  Handle<BreakPointInfo>::cast(NewStruct(TUPLE2_TYPE, TENURED));
3801  new_break_point_info->set_source_position(source_position);
3802  new_break_point_info->set_break_points(*undefined_value());
3803  return new_break_point_info;
3804 }
3805 
3806 Handle<BreakPoint> Factory::NewBreakPoint(int id, Handle<String> condition) {
3807  Handle<BreakPoint> new_break_point =
3808  Handle<BreakPoint>::cast(NewStruct(TUPLE2_TYPE, TENURED));
3809  new_break_point->set_id(id);
3810  new_break_point->set_condition(*condition);
3811  return new_break_point;
3812 }
3813 
3814 Handle<StackFrameInfo> Factory::NewStackFrameInfo() {
3815  Handle<StackFrameInfo> stack_frame_info = Handle<StackFrameInfo>::cast(
3816  NewStruct(STACK_FRAME_INFO_TYPE, NOT_TENURED));
3817  stack_frame_info->set_line_number(0);
3818  stack_frame_info->set_column_number(0);
3819  stack_frame_info->set_script_id(0);
3820  stack_frame_info->set_script_name(Smi::kZero);
3821  stack_frame_info->set_script_name_or_source_url(Smi::kZero);
3822  stack_frame_info->set_function_name(Smi::kZero);
3823  stack_frame_info->set_flag(0);
3824  return stack_frame_info;
3825 }
3826 
3827 Handle<SourcePositionTableWithFrameCache>
3828 Factory::NewSourcePositionTableWithFrameCache(
3829  Handle<ByteArray> source_position_table,
3830  Handle<SimpleNumberDictionary> stack_frame_cache) {
3831  Handle<SourcePositionTableWithFrameCache>
3832  source_position_table_with_frame_cache =
3833  Handle<SourcePositionTableWithFrameCache>::cast(
3834  NewStruct(TUPLE2_TYPE, TENURED));
3835  source_position_table_with_frame_cache->set_source_position_table(
3836  *source_position_table);
3837  source_position_table_with_frame_cache->set_stack_frame_cache(
3838  *stack_frame_cache);
3839  return source_position_table_with_frame_cache;
3840 }
3841 
3842 Handle<JSObject> Factory::NewArgumentsObject(Handle<JSFunction> callee,
3843  int length) {
3844  bool strict_mode_callee = is_strict(callee->shared()->language_mode()) ||
3845  !callee->shared()->has_simple_parameters();
3846  Handle<Map> map = strict_mode_callee ? isolate()->strict_arguments_map()
3847  : isolate()->sloppy_arguments_map();
3848  AllocationSiteUsageContext context(isolate(), Handle<AllocationSite>(),
3849  false);
3850  DCHECK(!isolate()->has_pending_exception());
3851  Handle<JSObject> result = NewJSObjectFromMap(map);
3852  Handle<Smi> value(Smi::FromInt(length), isolate());
3853  Object::SetProperty(isolate(), result, length_string(), value,
3854  LanguageMode::kStrict)
3855  .Assert();
3856  if (!strict_mode_callee) {
3857  Object::SetProperty(isolate(), result, callee_string(), callee,
3858  LanguageMode::kStrict)
3859  .Assert();
3860  }
3861  return result;
3862 }
3863 
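// Returns a map for an object literal with |number_of_properties| predeclared
// properties. Small counts are served from (and added to) a per-native-context
// weak map cache; zero properties reuse the Object function's initial map, and
// oversized literals fall back to the slow object-with-object-prototype map.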
3864 Handle<Map> Factory::ObjectLiteralMapFromCache(Handle<NativeContext> context,
3865  int number_of_properties) {
3866  if (number_of_properties == 0) {
3867  // Reuse the initial map of the Object function if the literal has no
3868  // predeclared properties.
3869  return handle(context->object_function()->initial_map(), isolate());
3870  }
3871 
3872  // We do not cache maps while the bootstrapper is running builtin code.
3873  if (isolate()->bootstrapper()->IsActive()) {
3874  return Map::Create(isolate(), number_of_properties);
3875  }
3876 
3877  // Use initial slow object proto map for too many properties.
3878  const int kMapCacheSize = 128;
3879  if (number_of_properties > kMapCacheSize) {
3880  return handle(context->slow_object_with_object_prototype_map(), isolate());
3881  }
3882 
3883  int cache_index = number_of_properties - 1;
3884  Handle<Object> maybe_cache(context->map_cache(), isolate());
3885  if (maybe_cache->IsUndefined(isolate())) {
3886  // Allocate the new map cache for the native context.
3887  maybe_cache = NewWeakFixedArray(kMapCacheSize, TENURED);
3888  context->set_map_cache(*maybe_cache);
3889  } else {
3890  // Check to see whether there is a matching element in the cache.
3891  Handle<WeakFixedArray> cache = Handle<WeakFixedArray>::cast(maybe_cache);
3892  MaybeObject result = cache->Get(cache_index);
3893  HeapObject* heap_object;
3894  if (result->GetHeapObjectIfWeak(&heap_object)) {
3895  Map map = Map::cast(heap_object);
3896  DCHECK(!map->is_dictionary_map());
3897  return handle(map, isolate());
3898  }
3899  }
3900 
3901  // Create a new map and add it to the cache.
3902  Handle<WeakFixedArray> cache = Handle<WeakFixedArray>::cast(maybe_cache);
3903  Handle<Map> map = Map::Create(isolate(), number_of_properties);
3904  DCHECK(!map->is_dictionary_map());
3905  cache->Set(cache_index, HeapObjectReference::Weak(*map));
3906  return map;
3907 }
3908 
3909 Handle<LoadHandler> Factory::NewLoadHandler(int data_count) {
3910  Handle<Map> map;
3911  switch (data_count) {
3912  case 1:
3913  map = load_handler1_map();
3914  break;
3915  case 2:
3916  map = load_handler2_map();
3917  break;
3918  case 3:
3919  map = load_handler3_map();
3920  break;
3921  default:
3922  UNREACHABLE();
3923  break;
3924  }
3925  return handle(LoadHandler::cast(New(map, TENURED)), isolate());
3926 }
3927 
3928 Handle<StoreHandler> Factory::NewStoreHandler(int data_count) {
3929  Handle<Map> map;
3930  switch (data_count) {
3931  case 0:
3932  map = store_handler0_map();
3933  break;
3934  case 1:
3935  map = store_handler1_map();
3936  break;
3937  case 2:
3938  map = store_handler2_map();
3939  break;
3940  case 3:
3941  map = store_handler3_map();
3942  break;
3943  default:
3944  UNREACHABLE();
3945  break;
3946  }
3947  return handle(StoreHandler::cast(New(map, TENURED)), isolate());
3948 }
3949 
3950 void Factory::SetRegExpAtomData(Handle<JSRegExp> regexp, JSRegExp::Type type,
3951  Handle<String> source, JSRegExp::Flags flags,
3952  Handle<Object> data) {
3953  Handle<FixedArray> store = NewFixedArray(JSRegExp::kAtomDataSize);
3954 
3955  store->set(JSRegExp::kTagIndex, Smi::FromInt(type));
3956  store->set(JSRegExp::kSourceIndex, *source);
3957  store->set(JSRegExp::kFlagsIndex, Smi::FromInt(flags));
3958  store->set(JSRegExp::kAtomPatternIndex, *data);
3959  regexp->set_data(*store);
3960 }
3961 
3962 void Factory::SetRegExpIrregexpData(Handle<JSRegExp> regexp,
3963  JSRegExp::Type type, Handle<String> source,
3964  JSRegExp::Flags flags, int capture_count) {
3965  Handle<FixedArray> store = NewFixedArray(JSRegExp::kIrregexpDataSize);
3966  Smi uninitialized = Smi::FromInt(JSRegExp::kUninitializedValue);
3967  store->set(JSRegExp::kTagIndex, Smi::FromInt(type));
3968  store->set(JSRegExp::kSourceIndex, *source);
3969  store->set(JSRegExp::kFlagsIndex, Smi::FromInt(flags));
3970  store->set(JSRegExp::kIrregexpLatin1CodeIndex, uninitialized);
3971  store->set(JSRegExp::kIrregexpUC16CodeIndex, uninitialized);
3972  store->set(JSRegExp::kIrregexpMaxRegisterCountIndex, Smi::kZero);
3973  store->set(JSRegExp::kIrregexpCaptureCountIndex, Smi::FromInt(capture_count));
3974  store->set(JSRegExp::kIrregexpCaptureNameMapIndex, uninitialized);
3975  regexp->set_data(*store);
3976 }
3977 
3978 Handle<RegExpMatchInfo> Factory::NewRegExpMatchInfo() {
3979  // Initially, the last match info consists of all fixed fields plus space for
3980  // the match itself (i.e., 2 capture indices).
3981  static const int kInitialSize = RegExpMatchInfo::kFirstCaptureIndex +
3982  RegExpMatchInfo::kInitialCaptureIndices;
3983 
3984  Handle<FixedArray> elems = NewFixedArray(kInitialSize);
3985  Handle<RegExpMatchInfo> result = Handle<RegExpMatchInfo>::cast(elems);
3986 
3987  result->SetNumberOfCaptureRegisters(RegExpMatchInfo::kInitialCaptureIndices);
3988  result->SetLastSubject(*empty_string());
3989  result->SetLastInput(*undefined_value());
3990  result->SetCapture(0, 0);
3991  result->SetCapture(1, 0);
3992 
3993  return result;
3994 }
3995 
3996 Handle<Object> Factory::GlobalConstantFor(Handle<Name> name) {
3997  if (Name::Equals(isolate(), name, undefined_string())) {
3998  return undefined_value();
3999  }
4000  if (Name::Equals(isolate(), name, NaN_string())) return nan_value();
4001  if (Name::Equals(isolate(), name, Infinity_string())) return infinity_value();
4002  return Handle<Object>::null();
4003 }
4004 
4005 Handle<Object> Factory::ToBoolean(bool value) {
4006  return value ? true_value() : false_value();
4007 }
4008 
4009 Handle<String> Factory::ToPrimitiveHintString(ToPrimitiveHint hint) {
4010  switch (hint) {
4011  case ToPrimitiveHint::kDefault:
4012  return default_string();
4013  case ToPrimitiveHint::kNumber:
4014  return number_string();
4015  case ToPrimitiveHint::kString:
4016  return string_string();
4017  }
4018  UNREACHABLE();
4019 }
4020 
4021 Handle<Map> Factory::CreateSloppyFunctionMap(
4022  FunctionMode function_mode, MaybeHandle<JSFunction> maybe_empty_function) {
4023  bool has_prototype = IsFunctionModeWithPrototype(function_mode);
4024  int header_size = has_prototype ? JSFunction::kSizeWithPrototype
4025  : JSFunction::kSizeWithoutPrototype;
4026  int descriptors_count = has_prototype ? 5 : 4;
4027  int inobject_properties_count = 0;
4028  if (IsFunctionModeWithName(function_mode)) ++inobject_properties_count;
4029 
4030  Handle<Map> map = NewMap(
4031  JS_FUNCTION_TYPE, header_size + inobject_properties_count * kPointerSize,
4032  TERMINAL_FAST_ELEMENTS_KIND, inobject_properties_count);
4033  map->set_has_prototype_slot(has_prototype);
4034  map->set_is_constructor(has_prototype);
4035  map->set_is_callable(true);
4036  Handle<JSFunction> empty_function;
4037  if (maybe_empty_function.ToHandle(&empty_function)) {
4038  Map::SetPrototype(isolate(), map, empty_function);
4039  }
4040 
4041  //
4042  // Setup descriptors array.
4043  //
4044  Map::EnsureDescriptorSlack(isolate(), map, descriptors_count);
4045 
4046  PropertyAttributes ro_attribs =
4047  static_cast<PropertyAttributes>(DONT_ENUM | DONT_DELETE | READ_ONLY);
4048  PropertyAttributes rw_attribs =
4049  static_cast<PropertyAttributes>(DONT_ENUM | DONT_DELETE);
4050  PropertyAttributes roc_attribs =
4051  static_cast<PropertyAttributes>(DONT_ENUM | READ_ONLY);
4052 
4053  int field_index = 0;
4054  STATIC_ASSERT(JSFunction::kLengthDescriptorIndex == 0);
4055  { // Add length accessor.
4056  Descriptor d = Descriptor::AccessorConstant(
4057  length_string(), function_length_accessor(), roc_attribs);
4058  map->AppendDescriptor(&d);
4059  }
4060 
4061  STATIC_ASSERT(JSFunction::kNameDescriptorIndex == 1);
4062  if (IsFunctionModeWithName(function_mode)) {
4063  // Add name field.
4064  Handle<Name> name = isolate()->factory()->name_string();
4065  Descriptor d = Descriptor::DataField(isolate(), name, field_index++,
4066  roc_attribs, Representation::Tagged());
4067  map->AppendDescriptor(&d);
4068 
4069  } else {
4070  // Add name accessor.
4071  Descriptor d = Descriptor::AccessorConstant(
4072  name_string(), function_name_accessor(), roc_attribs);
4073  map->AppendDescriptor(&d);
4074  }
4075  { // Add arguments accessor.
4076  Descriptor d = Descriptor::AccessorConstant(
4077  arguments_string(), function_arguments_accessor(), ro_attribs);
4078  map->AppendDescriptor(&d);
4079  }
4080  { // Add caller accessor.
4081  Descriptor d = Descriptor::AccessorConstant(
4082  caller_string(), function_caller_accessor(), ro_attribs);
4083  map->AppendDescriptor(&d);
4084  }
4085  if (IsFunctionModeWithPrototype(function_mode)) {
4086  // Add prototype accessor.
4087  PropertyAttributes attribs =
4088  IsFunctionModeWithWritablePrototype(function_mode) ? rw_attribs
4089  : ro_attribs;
4090  Descriptor d = Descriptor::AccessorConstant(
4091  prototype_string(), function_prototype_accessor(), attribs);
4092  map->AppendDescriptor(&d);
4093  }
4094  DCHECK_EQ(inobject_properties_count, field_index);
4095  LOG(isolate(), MapDetails(*map));
4096  return map;
4097 }
4098 
4099 Handle<Map> Factory::CreateStrictFunctionMap(
4100  FunctionMode function_mode, Handle<JSFunction> empty_function) {
4101  bool has_prototype = IsFunctionModeWithPrototype(function_mode);
4102  int header_size = has_prototype ? JSFunction::kSizeWithPrototype
4103  : JSFunction::kSizeWithoutPrototype;
4104  int inobject_properties_count = 0;
4105  if (IsFunctionModeWithName(function_mode)) ++inobject_properties_count;
4106  if (IsFunctionModeWithHomeObject(function_mode)) ++inobject_properties_count;
4107  int descriptors_count = (IsFunctionModeWithPrototype(function_mode) ? 3 : 2) +
4108  inobject_properties_count;
4109 
4110  Handle<Map> map = NewMap(
4111  JS_FUNCTION_TYPE, header_size + inobject_properties_count * kPointerSize,
4112  TERMINAL_FAST_ELEMENTS_KIND, inobject_properties_count);
4113  map->set_has_prototype_slot(has_prototype);
4114  map->set_is_constructor(has_prototype);
4115  map->set_is_callable(true);
4116  Map::SetPrototype(isolate(), map, empty_function);
4117 
4118  //
4119  // Setup descriptors array.
4120  //
4121  Map::EnsureDescriptorSlack(isolate(), map, descriptors_count);
4122 
4123  PropertyAttributes rw_attribs =
4124  static_cast<PropertyAttributes>(DONT_ENUM | DONT_DELETE);
4125  PropertyAttributes ro_attribs =
4126  static_cast<PropertyAttributes>(DONT_ENUM | DONT_DELETE | READ_ONLY);
4127  PropertyAttributes roc_attribs =
4128  static_cast<PropertyAttributes>(DONT_ENUM | READ_ONLY);
4129 
4130  int field_index = 0;
4131  STATIC_ASSERT(JSFunction::kLengthDescriptorIndex == 0);
4132  { // Add length accessor.
4133  Descriptor d = Descriptor::AccessorConstant(
4134  length_string(), function_length_accessor(), roc_attribs);
4135  map->AppendDescriptor(&d);
4136  }
4137 
4138  STATIC_ASSERT(JSFunction::kNameDescriptorIndex == 1);
4139  if (IsFunctionModeWithName(function_mode)) {
4140  // Add name field.
4141  Handle<Name> name = isolate()->factory()->name_string();
4142  Descriptor d = Descriptor::DataField(isolate(), name, field_index++,
4143  roc_attribs, Representation::Tagged());
4144  map->AppendDescriptor(&d);
4145 
4146  } else {
4147  // Add name accessor.
4148  Descriptor d = Descriptor::AccessorConstant(
4149  name_string(), function_name_accessor(), roc_attribs);
4150  map->AppendDescriptor(&d);
4151  }
4152 
4153  STATIC_ASSERT(JSFunction::kMaybeHomeObjectDescriptorIndex == 2);
4154  if (IsFunctionModeWithHomeObject(function_mode)) {
4155  // Add home object field.
4156  Handle<Name> name = isolate()->factory()->home_object_symbol();
4157  Descriptor d = Descriptor::DataField(isolate(), name, field_index++,
4158  DONT_ENUM, Representation::Tagged());
4159  map->AppendDescriptor(&d);
4160  }
4161 
4162  if (IsFunctionModeWithPrototype(function_mode)) {
4163  // Add prototype accessor.
4164  PropertyAttributes attribs =
4165  IsFunctionModeWithWritablePrototype(function_mode) ? rw_attribs
4166  : ro_attribs;
4167  Descriptor d = Descriptor::AccessorConstant(
4168  prototype_string(), function_prototype_accessor(), attribs);
4169  map->AppendDescriptor(&d);
4170  }
4171  DCHECK_EQ(inobject_properties_count, field_index);
4172  LOG(isolate(), MapDetails(*map));
4173  return map;
4174 }
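// Illustrative worked example (not from the V8 sources): the size and slack
// arithmetic above in concrete numbers. For a function_mode where
// IsFunctionModeWithPrototype, IsFunctionModeWithName and
// IsFunctionModeWithHomeObject all hold:
//   inobject_properties_count = 2                      (name + home object)
//   instance size             = JSFunction::kSizeWithPrototype
//                               + 2 * kPointerSize
//   descriptors_count         = 3 + 2 = 5   (an upper bound handed to
//                                            Map::EnsureDescriptorSlack)
// The body above then appends four descriptors: the length accessor, the
// name data field, the home object data field and the prototype accessor.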
4175 
4176 Handle<Map> Factory::CreateClassFunctionMap(Handle<JSFunction> empty_function) {
4177  Handle<Map> map = NewMap(JS_FUNCTION_TYPE, JSFunction::kSizeWithPrototype);
4178  map->set_has_prototype_slot(true);
4179  map->set_is_constructor(true);
4180  map->set_is_prototype_map(true);
4181  map->set_is_callable(true);
4182  Map::SetPrototype(isolate(), map, empty_function);
4183 
4184  //
4185  // Set up the descriptors array.
4186  //
4187  Map::EnsureDescriptorSlack(isolate(), map, 2);
4188 
4189  PropertyAttributes ro_attribs =
4190  static_cast<PropertyAttributes>(DONT_ENUM | DONT_DELETE | READ_ONLY);
4191  PropertyAttributes roc_attribs =
4192  static_cast<PropertyAttributes>(DONT_ENUM | READ_ONLY);
4193 
4194  STATIC_ASSERT(JSFunction::kLengthDescriptorIndex == 0);
4195  { // Add length accessor.
4196  Descriptor d = Descriptor::AccessorConstant(
4197  length_string(), function_length_accessor(), roc_attribs);
4198  map->AppendDescriptor(&d);
4199  }
4200 
4201  {
4202  // Add prototype accessor.
4203  Descriptor d = Descriptor::AccessorConstant(
4204  prototype_string(), function_prototype_accessor(), ro_attribs);
4205  map->AppendDescriptor(&d);
4206  }
4207  LOG(isolate(), MapDetails(*map));
4208  return map;
4209 }
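// Note (editorial, not from the V8 sources): compared with the strict
// function map above, a class-constructor map is additionally flagged as a
// prototype map and exposes only two descriptors. Their attributes correspond
// to the JS-observable property descriptors as follows:
//   roc_attribs (DONT_ENUM | READ_ONLY)
//       -> { writable: false, enumerable: false, configurable: true }   // length
//   ro_attribs  (DONT_ENUM | DONT_DELETE | READ_ONLY)
//       -> { writable: false, enumerable: false, configurable: false }  // prototype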
4210 
4211 Handle<JSPromise> Factory::NewJSPromiseWithoutHook(PretenureFlag pretenure) {
4212  Handle<JSPromise> promise = Handle<JSPromise>::cast(
4213  NewJSObject(isolate()->promise_function(), pretenure));
4214  promise->set_reactions_or_result(Smi::kZero);
4215  promise->set_flags(0);
4216  for (int i = 0; i < v8::Promise::kEmbedderFieldCount; i++) {
4217  promise->SetEmbedderField(i, Smi::kZero);
4218  }
4219  return promise;
4220 }
4221 
4222 Handle<JSPromise> Factory::NewJSPromise(PretenureFlag pretenure) {
4223  Handle<JSPromise> promise = NewJSPromiseWithoutHook(pretenure);
4224  isolate()->RunPromiseHook(PromiseHookType::kInit, promise, undefined_value());
4225  return promise;
4226 }
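// Illustrative sketch (not from the V8 sources) of how the two promise
// constructors above are used from elsewhere inside v8::internal. It assumes
// an active HandleScope and that NewJSPromise's pretenure parameter defaults
// to NOT_TENURED in factory.h:
//
//   Handle<JSPromise> promise = isolate->factory()->NewJSPromise();
//   // The promise starts out pending: reactions_or_result() and every
//   // embedder field are Smi::kZero and flags() is 0. The only difference
//   // from NewJSPromiseWithoutHook() is that the embedder's PromiseHook has
//   // already been notified with PromiseHookType::kInit.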
4227 
4228 Handle<CallHandlerInfo> Factory::NewCallHandlerInfo(bool has_no_side_effect) {
4229  Handle<Map> map = has_no_side_effect
4230  ? side_effect_free_call_handler_info_map()
4231  : side_effect_call_handler_info_map();
4232  Handle<CallHandlerInfo> info(CallHandlerInfo::cast(New(map, TENURED)),
4233  isolate());
4234  Object* undefined_value = ReadOnlyRoots(isolate()).undefined_value();
4235  info->set_callback(undefined_value);
4236  info->set_js_callback(undefined_value);
4237  info->set_data(undefined_value);
4238  return info;
4239 }
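// Sketch (not from the V8 sources): the only difference between the two
// CallHandlerInfo flavours above is the map, which records whether the
// embedder's callback is declared free of side effects; callback, js_callback
// and data all start out as undefined and are filled in by the caller. A
// hypothetical caller ('callback_obj' and 'data_obj' are made-up handles):
//
//   Handle<CallHandlerInfo> info =
//       isolate->factory()->NewCallHandlerInfo(/*has_no_side_effect=*/true);
//   info->set_callback(*callback_obj);
//   info->set_data(*data_obj);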
4240 
4241 // static
4242 NewFunctionArgs NewFunctionArgs::ForWasm(
4243  Handle<String> name,
4244  Handle<WasmExportedFunctionData> exported_function_data, Handle<Map> map) {
4245  NewFunctionArgs args;
4246  args.name_ = name;
4247  args.maybe_map_ = map;
4248  args.maybe_exported_function_data_ = exported_function_data;
4249  args.language_mode_ = LanguageMode::kSloppy;
4250  args.prototype_mutability_ = MUTABLE;
4251 
4252  return args;
4253 }
4254 
4255 // static
4256 NewFunctionArgs NewFunctionArgs::ForBuiltin(Handle<String> name,
4257  Handle<Map> map, int builtin_id) {
4258  DCHECK(Builtins::IsBuiltinId(builtin_id));
4259 
4260  NewFunctionArgs args;
4261  args.name_ = name;
4262  args.maybe_map_ = map;
4263  args.maybe_builtin_id_ = builtin_id;
4264  args.language_mode_ = LanguageMode::kStrict;
4265  args.prototype_mutability_ = MUTABLE;
4266 
4267  args.SetShouldSetLanguageMode();
4268 
4269  return args;
4270 }
4271 
4272 // static
4273 NewFunctionArgs NewFunctionArgs::ForFunctionWithoutCode(
4274  Handle<String> name, Handle<Map> map, LanguageMode language_mode) {
4275  NewFunctionArgs args;
4276  args.name_ = name;
4277  args.maybe_map_ = map;
4278  args.maybe_builtin_id_ = Builtins::kIllegal;
4279  args.language_mode_ = language_mode;
4280  args.prototype_mutability_ = MUTABLE;
4281 
4282  args.SetShouldSetLanguageMode();
4283 
4284  return args;
4285 }
4286 
4287 // static
4288 NewFunctionArgs NewFunctionArgs::ForBuiltinWithPrototype(
4289  Handle<String> name, Handle<Object> prototype, InstanceType type,
4290  int instance_size, int inobject_properties, int builtin_id,
4291  MutableMode prototype_mutability) {
4292  DCHECK(Builtins::IsBuiltinId(builtin_id));
4293 
4294  NewFunctionArgs args;
4295  args.name_ = name;
4296  args.type_ = type;
4297  args.instance_size_ = instance_size;
4298  args.inobject_properties_ = inobject_properties;
4299  args.maybe_prototype_ = prototype;
4300  args.maybe_builtin_id_ = builtin_id;
4301  args.language_mode_ = LanguageMode::kStrict;
4302  args.prototype_mutability_ = prototype_mutability;
4303 
4304  args.SetShouldCreateAndSetInitialMap();
4305  args.SetShouldSetPrototype();
4306  args.SetShouldSetLanguageMode();
4307 
4308  return args;
4309 }
4310 
4311 // static
4312 NewFunctionArgs NewFunctionArgs::ForBuiltinWithoutPrototype(
4313  Handle<String> name, int builtin_id, LanguageMode language_mode) {
4314  DCHECK(Builtins::IsBuiltinId(builtin_id));
4315 
4316  NewFunctionArgs args;
4317  args.name_ = name;
4318  args.maybe_builtin_id_ = builtin_id;
4319  args.language_mode_ = language_mode;
4320  args.prototype_mutability_ = MUTABLE;
4321 
4322  args.SetShouldSetLanguageMode();
4323 
4324  return args;
4325 }
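// Illustrative sketch (not from the V8 sources): the five ForXxx builders
// above only package the parameters that Factory::NewFunction consumes; the
// NewFunction(const NewFunctionArgs&) overload itself is declared in
// factory.h and is not part of this excerpt, so treat the call below as an
// assumption. 'name' is a Handle<String> and 'builtin_id' any id accepted by
// Builtins::IsBuiltinId (see the DCHECKs above):
//
//   NewFunctionArgs args = NewFunctionArgs::ForBuiltinWithoutPrototype(
//       name, builtin_id, LanguageMode::kStrict);
//   Handle<JSFunction> fn = isolate->factory()->NewFunction(args);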
4326 
4327 void NewFunctionArgs::SetShouldCreateAndSetInitialMap() {
4328  // Needed to create the initial map.
4329  maybe_prototype_.Assert();
4330  DCHECK_NE(kUninitialized, instance_size_);
4331  DCHECK_NE(kUninitialized, inobject_properties_);
4332 
4333  should_create_and_set_initial_map_ = true;
4334 }
4335 
4336 void NewFunctionArgs::SetShouldSetPrototype() {
4337  maybe_prototype_.Assert();
4338  should_set_prototype_ = true;
4339 }
4340 
4341 void NewFunctionArgs::SetShouldSetLanguageMode() {
4342  DCHECK(language_mode_ == LanguageMode::kStrict ||
4343  language_mode_ == LanguageMode::kSloppy);
4344  should_set_language_mode_ = true;
4345 }
4346 
4347 Handle<Map> NewFunctionArgs::GetMap(Isolate* isolate) const {
4348  if (!maybe_map_.is_null()) {
4349  return maybe_map_.ToHandleChecked();
4350  } else if (maybe_prototype_.is_null()) {
4351  return is_strict(language_mode_)
4352  ? isolate->strict_function_without_prototype_map()
4353  : isolate->sloppy_function_without_prototype_map();
4354  } else {
4355  DCHECK(!maybe_prototype_.is_null());
4356  switch (prototype_mutability_) {
4357  case MUTABLE:
4358  return is_strict(language_mode_) ? isolate->strict_function_map()
4359  : isolate->sloppy_function_map();
4360  case IMMUTABLE:
4361  return is_strict(language_mode_)
4362  ? isolate->strict_function_with_readonly_prototype_map()
4363  : isolate->sloppy_function_with_readonly_prototype_map();
4364  }
4365  }
4366  UNREACHABLE();
4367 }
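// Summary (editorial, not from the V8 sources) of the map selection above:
//   maybe_map_ was set (ForWasm, ForBuiltin, ForFunctionWithoutCode)
//       -> that map, used as-is
//   no map, no prototype (ForBuiltinWithoutPrototype)
//       -> strict_function_without_prototype_map() or
//          sloppy_function_without_prototype_map(), chosen by language mode
//   no map, prototype present (ForBuiltinWithPrototype)
//       MUTABLE   -> strict_function_map() / sloppy_function_map()
//       IMMUTABLE -> strict_function_with_readonly_prototype_map() /
//                    sloppy_function_with_readonly_prototype_map()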
4368 
4369 } // namespace internal
4370 } // namespace v8