V8 API Reference, 7.2.502.16 (for Deno 0.2.4)
heap-object-inl.h
// Copyright 2018 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef V8_OBJECTS_HEAP_OBJECT_INL_H_
#define V8_OBJECTS_HEAP_OBJECT_INL_H_

#include "src/objects/heap-object.h"

#include "src/heap/heap-write-barrier-inl.h"

// Has to be the last include (doesn't have include guards):
#include "src/objects/object-macros.h"

namespace v8 {
namespace internal {

#define TYPE_CHECK_FORWARDER(Type)                       \
  bool ObjectPtr::Is##Type() const {                     \
    return reinterpret_cast<Object*>(ptr())->Is##Type(); \
  }
HEAP_OBJECT_TYPE_LIST(TYPE_CHECK_FORWARDER)
TYPE_CHECK_FORWARDER(LayoutDescriptor);
TYPE_CHECK_FORWARDER(Primitive);
TYPE_CHECK_FORWARDER(Number);
TYPE_CHECK_FORWARDER(Numeric);
#undef TYPE_CHECK_FORWARDER
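
// Illustration (not part of the original file): each TYPE_CHECK_FORWARDER
// invocation above merely forwards the type predicate to the old Object*
// representation of the same address. For example, a HEAP_OBJECT_TYPE_LIST
// entry such as HeapNumber expands to roughly:
//
//   bool ObjectPtr::IsHeapNumber() const {
//     return reinterpret_cast<Object*>(ptr())->IsHeapNumber();
//   }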

#define TYPE_CHECK_FORWARDER(NAME, Name, name)           \
  bool ObjectPtr::Is##Name() const {                     \
    return reinterpret_cast<Object*>(ptr())->Is##Name(); \
  }
STRUCT_LIST(TYPE_CHECK_FORWARDER)
#undef TYPE_CHECK_FORWARDER

#define TYPE_CHECK_FORWARDER(Type, Value)                        \
  bool ObjectPtr::Is##Type(Isolate* isolate) const {             \
    return reinterpret_cast<Object*>(ptr())->Is##Type(isolate);  \
  }                                                              \
  bool ObjectPtr::Is##Type(ReadOnlyRoots roots) const {          \
    return reinterpret_cast<Object*>(ptr())->Is##Type(roots);    \
  }                                                              \
  bool ObjectPtr::Is##Type() const {                             \
    return reinterpret_cast<Object*>(ptr())->Is##Type();         \
  }
ODDBALL_LIST(TYPE_CHECK_FORWARDER)
#undef TYPE_CHECK_FORWARDER

bool ObjectPtr::IsHashTableBase() const { return IsHashTable(); }

bool ObjectPtr::IsSmallOrderedHashTable() const {
  return IsSmallOrderedHashSet() || IsSmallOrderedHashMap() ||
         IsSmallOrderedNameDictionary();
}

double ObjectPtr::Number() const {
  return reinterpret_cast<Object*>(ptr())->Number();
}

bool ObjectPtr::ToInt32(int32_t* value) const {
  return reinterpret_cast<Object*>(ptr())->ToInt32(value);
}

bool ObjectPtr::ToUint32(uint32_t* value) const {
  return reinterpret_cast<Object*>(ptr())->ToUint32(value);
}

bool ObjectPtr::FilterKey(PropertyFilter filter) {
  return reinterpret_cast<Object*>(ptr())->FilterKey(filter);
}

Object* ObjectPtr::GetHash() {
  return reinterpret_cast<Object*>(ptr())->GetHash();
}

bool ObjectPtr::ToArrayIndex(uint32_t* index) const {
  return reinterpret_cast<Object*>(ptr())->ToArrayIndex(index);
}

void ObjectPtr::ShortPrint(FILE* out) {
  return reinterpret_cast<Object*>(ptr())->ShortPrint(out);
}

void ObjectPtr::Print() { reinterpret_cast<Object*>(ptr())->Print(); }

void ObjectPtr::Print(std::ostream& os) {
  reinterpret_cast<Object*>(ptr())->Print(os);
}

OBJECT_CONSTRUCTORS_IMPL(HeapObjectPtr, ObjectPtr)
HeapObjectPtr::HeapObjectPtr(Address ptr, AllowInlineSmiStorage allow_smi)
    : ObjectPtr(ptr) {
  SLOW_DCHECK(
      (allow_smi == AllowInlineSmiStorage::kAllowBeingASmi && IsSmi()) ||
      IsHeapObject());
}
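
// Sketch (hypothetical call site, not in the original file): the two-argument
// constructor relaxes the SLOW_DCHECK so the stored word may also encode a
// Smi; the names obj/maybe_smi/address below are illustrative only.
//
//   HeapObjectPtr obj(address);  // must refer to a real heap object
//   HeapObjectPtr maybe_smi(address,
//                           AllowInlineSmiStorage::kAllowBeingASmi);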

#define TYPE_CHECK_FORWARDER(Type)                           \
  bool HeapObjectPtr::Is##Type() const {                     \
    return reinterpret_cast<HeapObject*>(ptr())->Is##Type(); \
  }
HEAP_OBJECT_TYPE_LIST(TYPE_CHECK_FORWARDER)
#undef TYPE_CHECK_FORWARDER

Map HeapObjectPtr::map() const {
  return Map::cast(READ_FIELD(this, kMapOffset));
}

void HeapObjectPtr::set_map(Map value) {
  reinterpret_cast<HeapObject*>(ptr())->set_map(value);
}

void HeapObjectPtr::set_map_no_write_barrier(Map value) {
  reinterpret_cast<HeapObject*>(ptr())->set_map_no_write_barrier(value);
}

void HeapObjectPtr::set_map_after_allocation(Map value, WriteBarrierMode mode) {
  reinterpret_cast<HeapObject*>(ptr())->set_map_after_allocation(value, mode);
}

ObjectSlot HeapObjectPtr::map_slot() {
  return ObjectSlot(FIELD_ADDR(this, kMapOffset));
}

MapWord HeapObjectPtr::map_word() const {
  return MapWord(RELAXED_READ_FIELD(this, kMapOffset).ptr());
}

void HeapObjectPtr::set_map_word(MapWord map_word) {
  RELAXED_WRITE_FIELD(this, kMapOffset,
                      reinterpret_cast<Object*>(map_word.value_));
}
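
// Note (illustrative sketch, not from this file): the map word at kMapOffset
// is read with a relaxed load and written with a relaxed store here, and with
// a release store in synchronized_set_map_word() below. Using
// MapWord::FromMap() as the code below does, a barrier-free map update via
// the word accessors looks roughly like (new_map is a hypothetical Map):
//
//   obj.set_map_word(MapWord::FromMap(new_map));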

void HeapObjectPtr::synchronized_set_map(Map value) {
  if (!value.is_null()) {
#ifdef VERIFY_HEAP
    Heap::FromWritableHeapObject(this)->VerifyObjectLayoutChange(*this, value);
#endif
  }
  synchronized_set_map_word(MapWord::FromMap(value));
  if (!value.is_null()) {
    // TODO(1600) We are passing kNullAddress as a slot because maps can never
    // be on an evacuation candidate.
    MarkingBarrier(this, ObjectSlot(kNullAddress), value);
  }
}

void HeapObjectPtr::synchronized_set_map_word(MapWord map_word) {
  RELEASE_WRITE_FIELD(this, kMapOffset,
                      reinterpret_cast<Object*>(map_word.value_));
}

WriteBarrierMode HeapObjectPtr::GetWriteBarrierMode(
    const DisallowHeapAllocation& promise) {
  Heap* heap = Heap::FromWritableHeapObject(this);
  if (heap->incremental_marking()->IsMarking()) return UPDATE_WRITE_BARRIER;
  if (Heap::InNewSpace(*this)) return SKIP_WRITE_BARRIER;
  return UPDATE_WRITE_BARRIER;
}
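
// Typical call-site shape (hypothetical sketch; no_gc and set_foo are
// illustrative names, not part of this file): the mode is computed once while
// heap allocation is disallowed and then reused for subsequent field stores.
//
//   DisallowHeapAllocation no_gc;
//   WriteBarrierMode mode = obj.GetWriteBarrierMode(no_gc);
//   obj.set_foo(value, mode);  // hypothetical setter that accepts a mode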

ReadOnlyRoots HeapObjectPtr::GetReadOnlyRoots() const {
  // TODO(v8:7464): When RO_SPACE is embedded, this will access a global
  // variable instead.
  return ReadOnlyRoots(MemoryChunk::FromHeapObject(*this)->heap());
}
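
// Sketch (illustrative, not from this file): the roots handle pairs with the
// ODDBALL_LIST forwarders defined above, letting callers test a HeapObjectPtr
// against read-only singletons without an Isolate, roughly:
//
//   ReadOnlyRoots roots = obj.GetReadOnlyRoots();
//   if (obj.IsUndefined(roots)) { /* ... */ }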

int HeapObjectPtr::Size() const {
  return reinterpret_cast<HeapObject*>(ptr())->Size();
}
int HeapObjectPtr::SizeFromMap(Map map) const {
  return reinterpret_cast<HeapObject*>(ptr())->SizeFromMap(map);
}

ObjectSlot HeapObjectPtr::RawField(int byte_offset) const {
  return ObjectSlot(FIELD_ADDR(this, byte_offset));
}

MaybeObjectSlot HeapObjectPtr::RawMaybeWeakField(int byte_offset) const {
  return MaybeObjectSlot(FIELD_ADDR(this, byte_offset));
}

Address HeapObjectPtr::GetFieldAddress(int field_offset) const {
  return FIELD_ADDR(this, field_offset);
}
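
// Sketch (assumptions flagged: kMapOffset and kPointerSize are V8 constants
// defined outside this file, and the loop ignores untagged fields): RawField()
// turns a byte offset into a typed slot, so walking an object's tagged fields
// looks roughly like:
//
//   for (int offset = kMapOffset; offset < obj.Size();
//        offset += kPointerSize) {
//     ObjectSlot slot = obj.RawField(offset);
//     // ... visit *slot ...
//   }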

Heap* NeverReadOnlySpaceObjectPtr::GetHeap(const HeapObjectPtr object) {
  MemoryChunk* chunk = MemoryChunk::FromAddress(object.ptr());
  // Make sure we are not accessing an object in RO space.
  SLOW_DCHECK(chunk->owner()->identity() != RO_SPACE);
  Heap* heap = chunk->heap();
  SLOW_DCHECK(heap != nullptr);
  return heap;
}

Isolate* NeverReadOnlySpaceObjectPtr::GetIsolate(const HeapObjectPtr object) {
  return GetHeap(object)->isolate();
}
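
// Sketch (illustrative only; obj is a hypothetical HeapObjectPtr): for objects
// that can never live in read-only space, the owning Isolate is recovered from
// the object's MemoryChunk rather than passed in:
//
//   Isolate* isolate = NeverReadOnlySpaceObjectPtr::GetIsolate(obj);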

}  // namespace internal
}  // namespace v8

#include "src/objects/object-macros-undef.h"

#endif  // V8_OBJECTS_HEAP_OBJECT_INL_H_