V8 API Reference, 7.2.502.16 (for Deno 0.2.4)
map-inl.h
1 // Copyright 2017 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
4 
5 #ifndef V8_OBJECTS_MAP_INL_H_
6 #define V8_OBJECTS_MAP_INL_H_
7 
8 #include "src/objects/map.h"
9 
10 #include "src/field-type.h"
11 #include "src/heap/heap-inl.h"
12 #include "src/layout-descriptor-inl.h"
13 #include "src/objects-inl.h"
14 #include "src/objects/api-callbacks-inl.h"
15 #include "src/objects/descriptor-array.h"
16 #include "src/objects/instance-type-inl.h"
17 #include "src/objects/prototype-info-inl.h"
18 #include "src/objects/shared-function-info.h"
19 #include "src/objects/templates-inl.h"
20 #include "src/property.h"
21 #include "src/transitions.h"
22 
23 // Has to be the last include (doesn't have include guards):
24 #include "src/objects/object-macros.h"
25 
26 namespace v8 {
27 namespace internal {
28 
// Boilerplate constructors and checked casts for the Map value class.
OBJECT_CONSTRUCTORS_IMPL(Map, HeapObjectPtr)
CAST_ACCESSOR2(Map)

ACCESSORS(Map, instance_descriptors, DescriptorArray, kDescriptorsOffset)
// A freshly allocated layout descriptor can be set on an existing map.
// We need to use release-store and acquire-load accessor pairs to ensure
// that the concurrent marking thread observes initializing stores of the
// layout descriptor.
SYNCHRONIZED_ACCESSORS_CHECKED(Map, layout_descriptor, LayoutDescriptor,
                               kLayoutDescriptorOffset,
                               FLAG_unbox_double_fields)
// This slot is shared with prototype_info(): prototype maps store a
// PrototypeInfo here instead of transition data (see prototype_info() below).
WEAK_ACCESSORS(Map, raw_transitions, kTransitionsOrPrototypeInfoOffset)

// |bit_field| fields.
BIT_FIELD_ACCESSORS(Map, bit_field, has_non_instance_prototype,
                    Map::HasNonInstancePrototypeBit)
BIT_FIELD_ACCESSORS(Map, bit_field, is_callable, Map::IsCallableBit)
BIT_FIELD_ACCESSORS(Map, bit_field, has_named_interceptor,
                    Map::HasNamedInterceptorBit)
BIT_FIELD_ACCESSORS(Map, bit_field, has_indexed_interceptor,
                    Map::HasIndexedInterceptorBit)
BIT_FIELD_ACCESSORS(Map, bit_field, is_undetectable, Map::IsUndetectableBit)
BIT_FIELD_ACCESSORS(Map, bit_field, is_access_check_needed,
                    Map::IsAccessCheckNeededBit)
BIT_FIELD_ACCESSORS(Map, bit_field, is_constructor, Map::IsConstructorBit)
BIT_FIELD_ACCESSORS(Map, bit_field, has_prototype_slot,
                    Map::HasPrototypeSlotBit)

// |bit_field2| fields.
BIT_FIELD_ACCESSORS(Map, bit_field2, is_extensible, Map::IsExtensibleBit)
BIT_FIELD_ACCESSORS(Map, bit_field2, is_prototype_map, Map::IsPrototypeMapBit)
BIT_FIELD_ACCESSORS(Map, bit_field2, is_in_retained_map_list,
                    Map::IsInRetainedMapListBit)

// |bit_field3| fields.
BIT_FIELD_ACCESSORS(Map, bit_field3, owns_descriptors, Map::OwnsDescriptorsBit)
BIT_FIELD_ACCESSORS(Map, bit_field3, has_hidden_prototype,
                    Map::HasHiddenPrototypeBit)
BIT_FIELD_ACCESSORS(Map, bit_field3, is_deprecated, Map::IsDeprecatedBit)
BIT_FIELD_ACCESSORS(Map, bit_field3, is_migration_target,
                    Map::IsMigrationTargetBit)
BIT_FIELD_ACCESSORS(Map, bit_field3, is_immutable_proto,
                    Map::IsImmutablePrototypeBit)
BIT_FIELD_ACCESSORS(Map, bit_field3, new_target_is_base,
                    Map::NewTargetIsBaseBit)
BIT_FIELD_ACCESSORS(Map, bit_field3, may_have_interesting_symbols,
                    Map::MayHaveInterestingSymbolsBit)
BIT_FIELD_ACCESSORS(Map, bit_field3, construction_counter,
                    Map::ConstructionCounterBits)
78 
79 InterceptorInfo* Map::GetNamedInterceptor() {
80  DCHECK(has_named_interceptor());
81  FunctionTemplateInfo* info = GetFunctionTemplateInfo();
82  return InterceptorInfo::cast(info->GetNamedPropertyHandler());
83 }
84 
85 InterceptorInfo* Map::GetIndexedInterceptor() {
86  DCHECK(has_indexed_interceptor());
87  FunctionTemplateInfo* info = GetFunctionTemplateInfo();
88  return InterceptorInfo::cast(info->GetIndexedPropertyHandler());
89 }
90 
91 bool Map::IsInplaceGeneralizableField(PropertyConstness constness,
92  Representation representation,
93  FieldType field_type) {
94  if (FLAG_track_constant_fields && FLAG_modify_map_inplace &&
95  (constness == PropertyConstness::kConst)) {
96  // VariableMode::kConst -> PropertyConstness::kMutable field generalization
97  // may happen in-place.
98  return true;
99  }
100  if (representation.IsHeapObject() && !field_type->IsAny()) {
101  return true;
102  }
103  return false;
104 }
105 
106 bool Map::CanHaveFastTransitionableElementsKind(InstanceType instance_type) {
107  return instance_type == JS_ARRAY_TYPE || instance_type == JS_VALUE_TYPE ||
108  instance_type == JS_ARGUMENTS_TYPE;
109 }
110 
111 bool Map::CanHaveFastTransitionableElementsKind() const {
112  return CanHaveFastTransitionableElementsKind(instance_type());
113 }
114 
// static
// Normalizes |constness| and |field_type| (in/out parameters) so that maps
// with transitionable fast elements kinds never end up with in-place
// generalizable fields; see the comment in the body for the rationale.
void Map::GeneralizeIfCanHaveTransitionableFastElementsKind(
    Isolate* isolate, InstanceType instance_type, PropertyConstness* constness,
    Representation* representation, Handle<FieldType>* field_type) {
  if (CanHaveFastTransitionableElementsKind(instance_type)) {
    // We don't support propagation of field generalization through elements
    // kind transitions because they are inserted into the transition tree
    // before field transitions. In order to avoid complexity of handling
    // such a case we ensure that all maps with transitionable elements kinds
    // do not have fields that can be generalized in-place (without creation
    // of a new map).
    if (FLAG_track_constant_fields && FLAG_modify_map_inplace) {
      // The constness is either already PropertyConstness::kMutable or should
      // become PropertyConstness::kMutable if it was VariableMode::kConst.
      *constness = PropertyConstness::kMutable;
    }
    if (representation->IsHeapObject()) {
      // The field type is either already Any or should become Any if it was
      // something else.
      *field_type = FieldType::Any(isolate);
    }
  }
}
138 
139 bool Map::IsUnboxedDoubleField(FieldIndex index) const {
140  if (!FLAG_unbox_double_fields) return false;
141  if (index.is_hidden_field() || !index.is_inobject()) return false;
142  return !layout_descriptor()->IsTagged(index.property_index());
143 }
144 
// Heuristic deciding whether the object should switch to dictionary (slow)
// properties instead of growing its fast-property backing store further.
bool Map::TooManyFastProperties(StoreOrigin store_origin) const {
  // As long as there is slack, adding a fast property is always fine.
  if (UnusedPropertyFields() != 0) return false;
  if (is_prototype_map()) return false;
  if (store_origin == StoreOrigin::kNamed) {
    int limit = Max(kMaxFastProperties, GetInObjectProperties());
    FieldCounts counts = GetFieldCounts();
    // Only count mutable fields so that objects with large numbers of
    // constant functions do not go to dictionary mode. That would be bad
    // because such objects have often been used as modules.
    int external = counts.mutable_count() - GetInObjectProperties();
    return external > limit || counts.GetTotal() > kMaxNumberOfDescriptors;
  } else {
    // Non-named stores use the softer limit and count all fields.
    int limit = Max(kFastPropertiesSoftLimit, GetInObjectProperties());
    int external = NumberOfFields() - GetInObjectProperties();
    return external > limit;
  }
}
162 
// Details of the most recently added own descriptor.
PropertyDetails Map::GetLastDescriptorDetails() const {
  return instance_descriptors()->GetDetails(LastAdded());
}

// Index of the last own descriptor; requires at least one own descriptor.
int Map::LastAdded() const {
  int number_of_own_descriptors = NumberOfOwnDescriptors();
  DCHECK_GT(number_of_own_descriptors, 0);
  return number_of_own_descriptors - 1;
}

int Map::NumberOfOwnDescriptors() const {
  return NumberOfOwnDescriptorsBits::decode(bit_field3());
}

void Map::SetNumberOfOwnDescriptors(int number) {
  // A map cannot own more descriptors than its descriptor array holds.
  DCHECK_LE(number, instance_descriptors()->number_of_descriptors());
  CHECK_LE(static_cast<unsigned>(number),
           static_cast<unsigned>(kMaxNumberOfDescriptors));
  set_bit_field3(NumberOfOwnDescriptorsBits::update(bit_field3(), number));
}

int Map::EnumLength() const { return EnumLengthBits::decode(bit_field3()); }

void Map::SetEnumLength(int length) {
  // kInvalidEnumCacheSentinel marks the enum cache as absent and is stored
  // without range-checking.
  if (length != kInvalidEnumCacheSentinel) {
    DCHECK_LE(length, NumberOfOwnDescriptors());
    CHECK_LE(static_cast<unsigned>(length),
             static_cast<unsigned>(kMaxNumberOfDescriptors));
  }
  set_bit_field3(EnumLengthBits::update(bit_field3(), length));
}
194 
// Returns the canonical empty elements backing store matching this map's
// elements kind. Dies (UNREACHABLE) for kinds without an empty singleton.
FixedArrayBase Map::GetInitialElements() const {
  FixedArrayBase result;
  if (has_fast_elements() || has_fast_string_wrapper_elements()) {
    result = GetReadOnlyRoots().empty_fixed_array();
  } else if (has_fast_sloppy_arguments_elements()) {
    result = GetReadOnlyRoots().empty_sloppy_arguments_elements();
  } else if (has_fixed_typed_array_elements()) {
    result = GetReadOnlyRoots().EmptyFixedTypedArrayForMap(*this);
  } else if (has_dictionary_elements()) {
    result = GetReadOnlyRoots().empty_slow_element_dictionary();
  } else {
    UNREACHABLE();
  }
  // All of the singletons above live in read-only/old space, never new space.
  DCHECK(!Heap::InNewSpace(result));
  return result;
}
211 
VisitorId Map::visitor_id() const {
  return static_cast<VisitorId>(
      RELAXED_READ_BYTE_FIELD(this, kVisitorIdOffset));
}

void Map::set_visitor_id(VisitorId id) {
  // The id must fit into the single-byte field.
  CHECK_LT(static_cast<unsigned>(id), 256);
  RELAXED_WRITE_BYTE_FIELD(this, kVisitorIdOffset, static_cast<byte>(id));
}

// Raw word-count accessors; instance_size() below converts words to bytes.
int Map::instance_size_in_words() const {
  return RELAXED_READ_BYTE_FIELD(this, kInstanceSizeInWordsOffset);
}

void Map::set_instance_size_in_words(int value) {
  RELAXED_WRITE_BYTE_FIELD(this, kInstanceSizeInWordsOffset,
                           static_cast<byte>(value));
}

int Map::instance_size() const {
  return instance_size_in_words() << kTaggedSizeLog2;
}

void Map::set_instance_size(int value) {
  // The size must be a whole number of tagged words and, in words, must fit
  // into the single-byte field.
  CHECK(IsAligned(value, kTaggedSize));
  value >>= kTaggedSizeLog2;
  CHECK_LT(static_cast<unsigned>(value), 256);
  set_instance_size_in_words(value);
}

// This byte is overloaded: JSObject maps store the word offset where
// in-object properties start, primitive maps store the constructor function
// index (see the typed wrappers below).
int Map::inobject_properties_start_or_constructor_function_index() const {
  return RELAXED_READ_BYTE_FIELD(
      this, kInObjectPropertiesStartOrConstructorFunctionIndexOffset);
}

void Map::set_inobject_properties_start_or_constructor_function_index(
    int value) {
  CHECK_LT(static_cast<unsigned>(value), 256);
  RELAXED_WRITE_BYTE_FIELD(
      this, kInObjectPropertiesStartOrConstructorFunctionIndexOffset,
      static_cast<byte>(value));
}
254 
// Typed wrapper over the overloaded byte above: JSObject maps only.
int Map::GetInObjectPropertiesStartInWords() const {
  DCHECK(IsJSObjectMap());
  return inobject_properties_start_or_constructor_function_index();
}

void Map::SetInObjectPropertiesStartInWords(int value) {
  CHECK(IsJSObjectMap());
  set_inobject_properties_start_or_constructor_function_index(value);
}

// Number of in-object property slots: the span between the start of the
// in-object properties and the end of the instance.
int Map::GetInObjectProperties() const {
  DCHECK(IsJSObjectMap());
  return instance_size_in_words() - GetInObjectPropertiesStartInWords();
}

// Typed wrapper over the overloaded byte above: primitive maps only.
int Map::GetConstructorFunctionIndex() const {
  DCHECK(IsPrimitiveMap());
  return inobject_properties_start_or_constructor_function_index();
}

void Map::SetConstructorFunctionIndex(int value) {
  CHECK(IsPrimitiveMap());
  set_inobject_properties_start_or_constructor_function_index(value);
}

// Byte offset of the |index|-th in-object property from the object start.
int Map::GetInObjectPropertyOffset(int index) const {
  return (GetInObjectPropertiesStartInWords() + index) * kTaggedSize;
}
283 
// Test-only shim exposing AddMissingTransitions().
Handle<Map> Map::AddMissingTransitionsForTesting(
    Isolate* isolate, Handle<Map> split_map,
    Handle<DescriptorArray> descriptors,
    Handle<LayoutDescriptor> full_layout_descriptor) {
  return AddMissingTransitions(isolate, split_map, descriptors,
                               full_layout_descriptor);
}

// The instance type occupies a 16-bit field.
InstanceType Map::instance_type() const {
  return static_cast<InstanceType>(
      READ_UINT16_FIELD(this, kInstanceTypeOffset));
}

void Map::set_instance_type(InstanceType value) {
  WRITE_UINT16_FIELD(this, kInstanceTypeOffset, value);
}
300 
301 int Map::UnusedPropertyFields() const {
302  int value = used_or_unused_instance_size_in_words();
303  DCHECK_IMPLIES(!IsJSObjectMap(), value == 0);
304  int unused;
305  if (value >= JSObject::kFieldsAdded) {
306  unused = instance_size_in_words() - value;
307  } else {
308  // For out of object properties "used_or_unused_instance_size_in_words"
309  // byte encodes the slack in the property array.
310  unused = value;
311  }
312  return unused;
313 }
314 
315 int Map::UnusedInObjectProperties() const {
316  // Like Map::UnusedPropertyFields(), but returns 0 for out of object
317  // properties.
318  int value = used_or_unused_instance_size_in_words();
319  DCHECK_IMPLIES(!IsJSObjectMap(), value == 0);
320  if (value >= JSObject::kFieldsAdded) {
321  return instance_size_in_words() - value;
322  }
323  return 0;
324 }
325 
// Overloaded byte: values >= JSObject::kFieldsAdded are the used instance
// size in words; smaller values are the slack in the out-of-object property
// array (see UnusedPropertyFields()).
int Map::used_or_unused_instance_size_in_words() const {
  return RELAXED_READ_BYTE_FIELD(this, kUsedOrUnusedInstanceSizeInWordsOffset);
}

void Map::set_used_or_unused_instance_size_in_words(int value) {
  CHECK_LE(static_cast<unsigned>(value), 255);
  RELAXED_WRITE_BYTE_FIELD(this, kUsedOrUnusedInstanceSizeInWordsOffset,
                           static_cast<byte>(value));
}

// Number of bytes of the instance that are actually in use.
int Map::UsedInstanceSize() const {
  int words = used_or_unused_instance_size_in_words();
  if (words < JSObject::kFieldsAdded) {
    // All in-object properties are used and the words is tracking the slack
    // in the property array.
    return instance_size();
  }
  return words * kTaggedSize;
}
345 
// Records |value| unused in-object property fields. For non-JSObject maps
// the only valid value is 0.
void Map::SetInObjectUnusedPropertyFields(int value) {
  STATIC_ASSERT(JSObject::kFieldsAdded == JSObject::kHeaderSize / kTaggedSize);
  if (!IsJSObjectMap()) {
    CHECK_EQ(0, value);
    set_used_or_unused_instance_size_in_words(0);
    DCHECK_EQ(0, UnusedPropertyFields());
    return;
  }
  CHECK_LE(0, value);
  DCHECK_LE(value, GetInObjectProperties());
  // Store the used size in words; UnusedPropertyFields() recovers the slack
  // as instance_size_in_words() minus the stored value.
  int used_inobject_properties = GetInObjectProperties() - value;
  set_used_or_unused_instance_size_in_words(
      GetInObjectPropertyOffset(used_inobject_properties) / kTaggedSize);
  DCHECK_EQ(value, UnusedPropertyFields());
}

// Records |value| unused slots in the out-of-object property array.
void Map::SetOutOfObjectUnusedPropertyFields(int value) {
  STATIC_ASSERT(JSObject::kFieldsAdded == JSObject::kHeaderSize / kTaggedSize);
  // Values below JSObject::kFieldsAdded are reserved for this encoding.
  CHECK_LT(static_cast<unsigned>(value), JSObject::kFieldsAdded);
  // For out of object properties "used_instance_size_in_words" byte encodes
  // the slack in the property array.
  set_used_or_unused_instance_size_in_words(value);
  DCHECK_EQ(value, UnusedPropertyFields());
}

// Copies the unused-field bookkeeping from |map|; the DCHECK verifies that
// the copied encoding yields the same slack on the receiver.
void Map::CopyUnusedPropertyFields(Map map) {
  set_used_or_unused_instance_size_in_words(
      map->used_or_unused_instance_size_in_words());
  DCHECK_EQ(UnusedPropertyFields(), map->UnusedPropertyFields());
}
376 
377 void Map::CopyUnusedPropertyFieldsAdjustedForInstanceSize(Map map) {
378  int value = map->used_or_unused_instance_size_in_words();
379  if (value >= JSValue::kFieldsAdded) {
380  // Unused in-object fields. Adjust the offset from the object’s start
381  // so it matches the distance to the object’s end.
382  value += instance_size_in_words() - map->instance_size_in_words();
383  }
384  set_used_or_unused_instance_size_in_words(value);
385  DCHECK_EQ(UnusedPropertyFields(), map->UnusedPropertyFields());
386 }
387 
// Bookkeeping for a newly added property field: advances the used counter,
// switching to out-of-object slack tracking when in-object space runs out.
void Map::AccountAddedPropertyField() {
  // Update used instance size and unused property fields number.
  STATIC_ASSERT(JSObject::kFieldsAdded == JSObject::kHeaderSize / kTaggedSize);
#ifdef DEBUG
  int new_unused = UnusedPropertyFields() - 1;
  if (new_unused < 0) new_unused += JSObject::kFieldsAdded;
#endif
  int value = used_or_unused_instance_size_in_words();
  if (value >= JSObject::kFieldsAdded) {
    if (value == instance_size_in_words()) {
      // In-object storage is exhausted; switch to property-array slack
      // tracking.
      AccountAddedOutOfObjectPropertyField(0);
    } else {
      // The property is added in-object, so simply increment the counter.
      set_used_or_unused_instance_size_in_words(value + 1);
    }
  } else {
    AccountAddedOutOfObjectPropertyField(value);
  }
  DCHECK_EQ(new_unused, UnusedPropertyFields());
}

// Decrements the property-array slack counter, wrapping modulo
// JSObject::kFieldsAdded (presumably because property arrays grow in
// kFieldsAdded-sized chunks, so a wrap models a freshly grown array —
// confirm against PropertyArray growth code).
void Map::AccountAddedOutOfObjectPropertyField(int unused_in_property_array) {
  unused_in_property_array--;
  if (unused_in_property_array < 0) {
    unused_in_property_array += JSObject::kFieldsAdded;
  }
  CHECK_LT(static_cast<unsigned>(unused_in_property_array),
           JSObject::kFieldsAdded);
  set_used_or_unused_instance_size_in_words(unused_in_property_array);
  DCHECK_EQ(unused_in_property_array, UnusedPropertyFields());
}
419 
byte Map::bit_field() const { return READ_BYTE_FIELD(this, kBitFieldOffset); }

void Map::set_bit_field(byte value) {
  WRITE_BYTE_FIELD(this, kBitFieldOffset, value);
}

byte Map::bit_field2() const { return READ_BYTE_FIELD(this, kBitField2Offset); }

void Map::set_bit_field2(byte value) {
  WRITE_BYTE_FIELD(this, kBitField2Offset, value);
}

// NOTE(review): a prototype map that no longer owns its descriptors is
// treated as abandoned — presumably it was superseded by a map transition;
// confirm against the prototype optimization code.
bool Map::is_abandoned_prototype_map() const {
  return is_prototype_map() && !owns_descriptors();
}

bool Map::should_be_fast_prototype_map() const {
  // The shared transitions/prototype-info slot may not hold a PrototypeInfo
  // yet; only a real PrototypeInfo carries the should_be_fast_map flag.
  if (!prototype_info()->IsPrototypeInfo()) return false;
  return PrototypeInfo::cast(prototype_info())->should_be_fast_map();
}

void Map::set_elements_kind(ElementsKind elements_kind) {
  CHECK_LT(static_cast<int>(elements_kind), kElementsKindCount);
  set_bit_field2(Map::ElementsKindBits::update(bit_field2(), elements_kind));
}

ElementsKind Map::elements_kind() const {
  return Map::ElementsKindBits::decode(bit_field2());
}
449 
// Elements-kind predicates; all are derived from elements_kind().

bool Map::has_fast_smi_elements() const {
  return IsSmiElementsKind(elements_kind());
}

bool Map::has_fast_object_elements() const {
  return IsObjectElementsKind(elements_kind());
}

bool Map::has_fast_smi_or_object_elements() const {
  return IsSmiOrObjectElementsKind(elements_kind());
}

bool Map::has_fast_double_elements() const {
  return IsDoubleElementsKind(elements_kind());
}

bool Map::has_fast_elements() const {
  return IsFastElementsKind(elements_kind());
}

bool Map::has_sloppy_arguments_elements() const {
  return IsSloppyArgumentsElementsKind(elements_kind());
}

bool Map::has_fast_sloppy_arguments_elements() const {
  return elements_kind() == FAST_SLOPPY_ARGUMENTS_ELEMENTS;
}

bool Map::has_fast_string_wrapper_elements() const {
  return elements_kind() == FAST_STRING_WRAPPER_ELEMENTS;
}

bool Map::has_fixed_typed_array_elements() const {
  return IsFixedTypedArrayElementsKind(elements_kind());
}

bool Map::has_dictionary_elements() const {
  return IsDictionaryElementsKind(elements_kind());
}
489 
void Map::set_is_dictionary_map(bool value) {
  uint32_t new_bit_field3 = IsDictionaryMapBit::update(bit_field3(), value);
  // The unstable bit is kept in sync with the dictionary bit: dictionary
  // maps are always considered unstable.
  new_bit_field3 = IsUnstableBit::update(new_bit_field3, value);
  set_bit_field3(new_bit_field3);
}

bool Map::is_dictionary_map() const {
  return IsDictionaryMapBit::decode(bit_field3());
}

void Map::mark_unstable() {
  set_bit_field3(IsUnstableBit::update(bit_field3(), true));
}

bool Map::is_stable() const { return !IsUnstableBit::decode(bit_field3()); }
505 
506 bool Map::CanBeDeprecated() const {
507  int descriptor = LastAdded();
508  for (int i = 0; i <= descriptor; i++) {
509  PropertyDetails details = instance_descriptors()->GetDetails(i);
510  if (details.representation().IsNone()) return true;
511  if (details.representation().IsSmi()) return true;
512  if (details.representation().IsDouble()) return true;
513  if (details.representation().IsHeapObject()) return true;
514  if (details.kind() == kData && details.location() == kDescriptor) {
515  return true;
516  }
517  }
518  return false;
519 }
520 
// Called when the layout described by this map changes: marks the map
// unstable and deoptimizes dependent code (prototype-check group).
void Map::NotifyLeafMapLayoutChange(Isolate* isolate) {
  if (is_stable()) {
    mark_unstable();
    dependent_code()->DeoptimizeDependentCodeGroup(
        isolate, DependentCode::kPrototypeCheckGroup);
  }
}

bool Map::CanTransition() const {
  // Only JSObject and subtypes have map transitions and back pointers.
  return InstanceTypeChecker::IsJSObject(instance_type());
}

// Generates a Map::Is<Type>Map() predicate for every registered instance
// type checker.
#define DEF_TESTER(Type, ...)                              \
  bool Map::Is##Type##Map() const {                        \
    return InstanceTypeChecker::Is##Type(instance_type()); \
  }
INSTANCE_TYPE_CHECKERS(DEF_TESTER)
#undef DEF_TESTER
540 
541 bool Map::IsBooleanMap() const {
542  return *this == GetReadOnlyRoots().boolean_map();
543 }
544 
545 bool Map::IsNullOrUndefinedMap() const {
546  return *this == GetReadOnlyRoots().null_map() ||
547  *this == GetReadOnlyRoots().undefined_map();
548 }
549 
550 bool Map::IsPrimitiveMap() const {
551  return instance_type() <= LAST_PRIMITIVE_TYPE;
552 }
553 
Object* Map::prototype() const { return READ_FIELD(this, kPrototypeOffset); }

void Map::set_prototype(Object* value, WriteBarrierMode mode) {
  // Prototypes are restricted to JSReceivers or null.
  DCHECK(value->IsNull() || value->IsJSReceiver());
  WRITE_FIELD(this, kPrototypeOffset, value);
  CONDITIONAL_WRITE_BARRIER(this, kPrototypeOffset, value, mode);
}

LayoutDescriptor Map::layout_descriptor_gc_safe() const {
  DCHECK(FLAG_unbox_double_fields);
  // The loaded value can be dereferenced on background thread to load the
  // bitmap. We need acquire load in order to ensure that the bitmap
  // initializing stores are also visible to the background thread.
  Object* layout_desc = ACQUIRE_READ_FIELD(this, kLayoutDescriptorOffset);
  return LayoutDescriptor::cast_gc_safe(layout_desc);
}

bool Map::HasFastPointerLayout() const {
  DCHECK(FLAG_unbox_double_fields);
  // The loaded value is used for SMI check only and is not dereferenced,
  // so relaxed load is safe.
  Object* layout_desc = RELAXED_READ_FIELD(this, kLayoutDescriptorOffset);
  return LayoutDescriptor::IsFastPointerLayout(layout_desc);
}
578 
// Swaps in a new descriptor array (and, with double unboxing, a new layout
// descriptor) without changing the number of own descriptors.
void Map::UpdateDescriptors(DescriptorArray* descriptors,
                            LayoutDescriptor layout_desc) {
  set_instance_descriptors(descriptors);
  if (FLAG_unbox_double_fields) {
    // Only a slow layout descriptor is replaced here; presumably fast-layout
    // maps share the canonical fast layout (see GetLayoutDescriptor()).
    if (layout_descriptor()->IsSlowLayout()) {
      set_layout_descriptor(layout_desc);
    }
#ifdef VERIFY_HEAP
    // TODO(ishell): remove these checks from VERIFY_HEAP mode.
    if (FLAG_verify_heap) {
      CHECK(layout_descriptor()->IsConsistentWithMap(*this));
      CHECK_EQ(Map::GetVisitorId(*this), visitor_id());
    }
#else
    SLOW_DCHECK(layout_descriptor()->IsConsistentWithMap(*this));
    DCHECK(visitor_id() == Map::GetVisitorId(*this));
#endif
  }
}

// Installs a descriptor array and adopts all of its descriptors as own;
// with double unboxing also refreshes the layout descriptor and visitor id.
void Map::InitializeDescriptors(DescriptorArray* descriptors,
                                LayoutDescriptor layout_desc) {
  int len = descriptors->number_of_descriptors();
  set_instance_descriptors(descriptors);
  SetNumberOfOwnDescriptors(len);

  if (FLAG_unbox_double_fields) {
    set_layout_descriptor(layout_desc);
#ifdef VERIFY_HEAP
    // TODO(ishell): remove these checks from VERIFY_HEAP mode.
    if (FLAG_verify_heap) {
      CHECK(layout_descriptor()->IsConsistentWithMap(*this));
    }
#else
    SLOW_DCHECK(layout_descriptor()->IsConsistentWithMap(*this));
#endif
    set_visitor_id(Map::GetVisitorId(*this));
  }
}
618 
void Map::set_bit_field3(uint32_t bits) {
  if (kInt32Size != kTaggedSize) {
    // When the tagged slot is wider than 32 bits, zero the upper half so the
    // whole word has a deterministic value.
    WRITE_UINT32_FIELD(this, kBitField3Offset + kInt32Size, 0);
  }
  WRITE_UINT32_FIELD(this, kBitField3Offset, bits);
}

uint32_t Map::bit_field3() const {
  return READ_UINT32_FIELD(this, kBitField3Offset);
}

// Returns the layout descriptor, or the canonical fast-pointer layout when
// double-field unboxing is disabled.
LayoutDescriptor Map::GetLayoutDescriptor() const {
  return FLAG_unbox_double_fields ? layout_descriptor()
                                  : LayoutDescriptor::FastPointerLayout();
}
634 
// Appends |desc| to the descriptor array and updates the map's bookkeeping:
// own-descriptor count, interesting-symbol bit, and used-field accounting.
void Map::AppendDescriptor(Descriptor* desc) {
  DescriptorArray* descriptors = instance_descriptors();
  int number_of_own_descriptors = NumberOfOwnDescriptors();
  // The map must own the whole array for an in-place append to be valid.
  DCHECK(descriptors->number_of_descriptors() == number_of_own_descriptors);
  descriptors->Append(desc);
  SetNumberOfOwnDescriptors(number_of_own_descriptors + 1);

  // Properly mark the map if the {desc} is an "interesting symbol".
  if (desc->GetKey()->IsInterestingSymbol()) {
    set_may_have_interesting_symbols(true);
  }
  PropertyDetails details = desc->GetDetails();
  if (details.location() == kField) {
    DCHECK_GT(UnusedPropertyFields(), 0);
    AccountAddedPropertyField();
  }

// This function does not support appending double field descriptors and
// it should never try to (otherwise, layout descriptor must be updated too).
#ifdef DEBUG
  DCHECK(details.location() != kField || !details.representation().IsDouble());
#endif
}
658 
// Returns the back pointer (the map this one was transitioned from) or
// undefined. The slot is shared with the constructor; only a Map value is a
// back pointer (see GetConstructor()).
Object* Map::GetBackPointer() const {
  Object* object = constructor_or_backpointer();
  if (object->IsMap()) {
    return object;
  }
  return GetReadOnlyRoots().undefined_value();
}

// Looks up the elements-kind transition target for this map, if any.
Map Map::ElementsTransitionMap() {
  DisallowHeapAllocation no_gc;
  // TODO(delphick): While it's safe to pass nullptr for Isolate* here as
  // SearchSpecial doesn't need it, this is really ugly. Perhaps factor out a
  // base class for methods not requiring an Isolate?
  return TransitionsAccessor(nullptr, *this, &no_gc)
      .SearchSpecial(GetReadOnlyRoots().elements_transition_symbol());
}
675 
// Prototype maps reuse the transitions slot to store a PrototypeInfo.
Object* Map::prototype_info() const {
  DCHECK(is_prototype_map());
  return READ_FIELD(this, Map::kTransitionsOrPrototypeInfoOffset);
}

void Map::set_prototype_info(Object* value, WriteBarrierMode mode) {
  CHECK(is_prototype_map());
  WRITE_FIELD(this, Map::kTransitionsOrPrototypeInfoOffset, value);
  CONDITIONAL_WRITE_BARRIER(this, Map::kTransitionsOrPrototypeInfoOffset, value,
                            mode);
}

// Installs the back pointer. Only legal while the shared slot still holds
// the constructor (i.e. no back pointer installed yet), and the new back
// pointer's map must agree on that constructor.
void Map::SetBackPointer(Object* value, WriteBarrierMode mode) {
  CHECK_GE(instance_type(), FIRST_JS_RECEIVER_TYPE);
  CHECK(value->IsMap());
  CHECK(GetBackPointer()->IsUndefined());
  CHECK_IMPLIES(value->IsMap(), Map::cast(value)->GetConstructor() ==
                                    constructor_or_backpointer());
  set_constructor_or_backpointer(value, mode);
}
696 
ACCESSORS(Map, dependent_code, DependentCode, kDependentCodeOffset)
ACCESSORS(Map, prototype_validity_cell, Object, kPrototypeValidityCellOffset)
ACCESSORS(Map, constructor_or_backpointer, Object,
          kConstructorOrBackPointerOffset)

// The validity cell is either a Smi or a Cell; its payload is compared
// against the kPrototypeChainValid sentinel.
bool Map::IsPrototypeValidityCellValid() const {
  Object* validity_cell = prototype_validity_cell();
  Object* value = validity_cell->IsSmi() ? Smi::cast(validity_cell)
                                         : Cell::cast(validity_cell)->value();
  return value == Smi::FromInt(Map::kPrototypeChainValid);
}
708 
// Returns the constructor, following back pointers through the shared
// constructor-or-backpointer slot until a non-map value is reached.
Object* Map::GetConstructor() const {
  Object* maybe_constructor = constructor_or_backpointer();
  // Follow any back pointers.
  while (maybe_constructor->IsMap()) {
    maybe_constructor =
        Map::cast(maybe_constructor)->constructor_or_backpointer();
  }
  return maybe_constructor;
}

// Returns the FunctionTemplateInfo behind this map's constructor; the
// constructor must be an API function or the template itself.
FunctionTemplateInfo* Map::GetFunctionTemplateInfo() const {
  Object* constructor = GetConstructor();
  if (constructor->IsJSFunction()) {
    DCHECK(JSFunction::cast(constructor)->shared()->IsApiFunction());
    return JSFunction::cast(constructor)->shared()->get_api_func_data();
  }
  DCHECK(constructor->IsFunctionTemplateInfo());
  return FunctionTemplateInfo::cast(constructor);
}
728 
void Map::SetConstructor(Object* constructor, WriteBarrierMode mode) {
  // Never overwrite a back pointer with a constructor.
  CHECK(!constructor_or_backpointer()->IsMap());
  set_constructor_or_backpointer(constructor, mode);
}

// Convenience overload that copies size and property counts from |map|.
Handle<Map> Map::CopyInitialMap(Isolate* isolate, Handle<Map> map) {
  return CopyInitialMap(isolate, map, map->instance_size(),
                        map->GetInObjectProperties(),
                        map->UnusedPropertyFields());
}

bool Map::IsInobjectSlackTrackingInProgress() const {
  return construction_counter() != Map::kNoSlackTracking;
}

// Counts down one constructed instance; when the counter hits
// kSlackTrackingCounterEnd, slack tracking is finalized.
void Map::InobjectSlackTrackingStep(Isolate* isolate) {
  // Slack tracking should only be performed on an initial map.
  DCHECK(GetBackPointer()->IsUndefined());
  if (!IsInobjectSlackTrackingInProgress()) return;
  int counter = construction_counter();
  set_construction_counter(counter - 1);
  if (counter == kSlackTrackingCounterEnd) {
    CompleteInobjectSlackTracking(isolate);
  }
}
755 
756 int Map::SlackForArraySize(int old_size, int size_limit) {
757  const int max_slack = size_limit - old_size;
758  CHECK_LE(0, max_slack);
759  if (old_size < 4) {
760  DCHECK_LE(1, max_slack);
761  return 1;
762  }
763  return Min(max_slack, old_size / 4);
764 }
765 
// Cache slot for a map, derived from its hash.
int NormalizedMapCache::GetIndex(Handle<Map> map) {
  return map->Hash() % NormalizedMapCache::kEntries;
}

// Heuristic type check: a normalized map cache is a WeakFixedArray of
// exactly kEntries elements (any such array passes this test).
bool NormalizedMapCache::IsNormalizedMapCache(const HeapObject* obj) {
  if (!obj->IsWeakFixedArray()) return false;
  if (WeakFixedArray::cast(obj)->length() != NormalizedMapCache::kEntries) {
    return false;
  }
#ifdef VERIFY_HEAP
  if (FLAG_verify_heap) {
    NormalizedMapCache* cache =
        reinterpret_cast<NormalizedMapCache*>(const_cast<HeapObject*>(obj));
    cache->NormalizedMapCacheVerify(cache->GetIsolate());
  }
#endif
  return true;
}
784 
785 } // namespace internal
786 } // namespace v8
787 
788 #include "src/objects/object-macros-undef.h"
789 
790 #endif // V8_OBJECTS_MAP_INL_H_
Definition: libplatform.h:13