#ifndef V8_OBJECTS_HEAP_OBJECT_INL_H_
#define V8_OBJECTS_HEAP_OBJECT_INL_H_

#include "src/objects/heap-object.h"

#include "src/heap/heap-write-barrier-inl.h"

// Has to be the last include (doesn't have include guards):
#include "src/objects/object-macros.h"

namespace v8 {
namespace internal {

#define TYPE_CHECK_FORWARDER(Type)                       \
  bool ObjectPtr::Is##Type() const {                     \
    return reinterpret_cast<Object*>(ptr())->Is##Type(); \
  }
HEAP_OBJECT_TYPE_LIST(TYPE_CHECK_FORWARDER)
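// Forwarders for types that are not covered by HEAP_OBJECT_TYPE_LIST.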
TYPE_CHECK_FORWARDER(LayoutDescriptor);
TYPE_CHECK_FORWARDER(Primitive);
TYPE_CHECK_FORWARDER(Number);
TYPE_CHECK_FORWARDER(Numeric);
#undef TYPE_CHECK_FORWARDER

#define TYPE_CHECK_FORWARDER(NAME, Name, name)           \
  bool ObjectPtr::Is##Name() const {                     \
    return reinterpret_cast<Object*>(ptr())->Is##Name(); \
  }
STRUCT_LIST(TYPE_CHECK_FORWARDER)
#undef TYPE_CHECK_FORWARDER

#define TYPE_CHECK_FORWARDER(Type, Value)                       \
  bool ObjectPtr::Is##Type(Isolate* isolate) const {            \
    return reinterpret_cast<Object*>(ptr())->Is##Type(isolate); \
  }                                                             \
  bool ObjectPtr::Is##Type(ReadOnlyRoots roots) const {         \
    return reinterpret_cast<Object*>(ptr())->Is##Type(roots);   \
  }                                                             \
  bool ObjectPtr::Is##Type() const {                            \
    return reinterpret_cast<Object*>(ptr())->Is##Type();        \
  }
ODDBALL_LIST(TYPE_CHECK_FORWARDER)
#undef TYPE_CHECK_FORWARDER

bool ObjectPtr::IsHashTableBase() const { return IsHashTable(); }

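// A small ordered hash table is any of the three SmallOrderedHash* variants.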
bool ObjectPtr::IsSmallOrderedHashTable() const {
  return IsSmallOrderedHashSet() || IsSmallOrderedHashMap() ||
         IsSmallOrderedNameDictionary();
}

double ObjectPtr::Number() const {
  return reinterpret_cast<Object*>(ptr())->Number();
}

bool ObjectPtr::ToInt32(int32_t* value) const {
  return reinterpret_cast<Object*>(ptr())->ToInt32(value);
}

bool ObjectPtr::ToUint32(uint32_t* value) const {
  return reinterpret_cast<Object*>(ptr())->ToUint32(value);
}

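// Forwards property-key filtering to the Object implementation.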
bool ObjectPtr::FilterKey(PropertyFilter filter) {
  return reinterpret_cast<Object*>(ptr())->FilterKey(filter);
}

Object* ObjectPtr::GetHash() {
  return reinterpret_cast<Object*>(ptr())->GetHash();
}

bool ObjectPtr::ToArrayIndex(uint32_t* index) const {
  return reinterpret_cast<Object*>(ptr())->ToArrayIndex(index);
}

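// Printing helpers; all variants forward to the Object printers.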
void ObjectPtr::ShortPrint(FILE* out) {
  return reinterpret_cast<Object*>(ptr())->ShortPrint(out);
}

void ObjectPtr::Print() { reinterpret_cast<Object*>(ptr())->Print(); }

void ObjectPtr::Print(std::ostream& os) {
  reinterpret_cast<Object*>(ptr())->Print(os);
}

OBJECT_CONSTRUCTORS_IMPL(HeapObjectPtr, ObjectPtr)

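// AllowInlineSmiStorage::kAllowBeingASmi lets this constructor accept a Smi,
// for fields that may legitimately hold either representation.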
HeapObjectPtr::HeapObjectPtr(Address ptr, AllowInlineSmiStorage allow_smi)
    : ObjectPtr(ptr) {
  SLOW_DCHECK(
      (allow_smi == AllowInlineSmiStorage::kAllowBeingASmi && IsSmi()) ||
      IsHeapObject());
}

#define TYPE_CHECK_FORWARDER(Type)                           \
  bool HeapObjectPtr::Is##Type() const {                     \
    return reinterpret_cast<HeapObject*>(ptr())->Is##Type(); \
  }
HEAP_OBJECT_TYPE_LIST(TYPE_CHECK_FORWARDER)
#undef TYPE_CHECK_FORWARDER

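// Map accessors. The map word is the first field of every heap object;
// relaxed and release variants exist for accesses that may race with
// concurrent GC threads.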
Map HeapObjectPtr::map() const {
  return Map::cast(READ_FIELD(this, kMapOffset));
}

void HeapObjectPtr::set_map(Map value) {
  reinterpret_cast<HeapObject*>(ptr())->set_map(value);
}

void HeapObjectPtr::set_map_no_write_barrier(Map value) {
  reinterpret_cast<HeapObject*>(ptr())->set_map_no_write_barrier(value);
}

void HeapObjectPtr::set_map_after_allocation(Map value, WriteBarrierMode mode) {
  reinterpret_cast<HeapObject*>(ptr())->set_map_after_allocation(value, mode);
}

ObjectSlot HeapObjectPtr::map_slot() {
  return ObjectSlot(FIELD_ADDR(this, kMapOffset));
}

MapWord HeapObjectPtr::map_word() const {
  return MapWord(RELAXED_READ_FIELD(this, kMapOffset).ptr());
}

void HeapObjectPtr::set_map_word(MapWord map_word) {
  RELAXED_WRITE_FIELD(this, kMapOffset,
                      reinterpret_cast<Object*>(map_word.value_));
}

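// The synchronized_* setters below use release stores, so a thread that
// observes the new map also observes every earlier write to the object.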
void HeapObjectPtr::synchronized_set_map(Map value) {
  if (!value.is_null()) {
#ifdef VERIFY_HEAP
    Heap::FromWritableHeapObject(this)->VerifyObjectLayoutChange(*this, value);
#endif
  }
  synchronized_set_map_word(MapWord::FromMap(value));
  if (!value.is_null()) {
    // Passing kNullAddress as the slot is fine because maps can never be on
    // an evacuation candidate.
    MarkingBarrier(this, ObjectSlot(kNullAddress), value);
  }
}

void HeapObjectPtr::synchronized_set_map_word(MapWord map_word) {
  RELEASE_WRITE_FIELD(this, kMapOffset,
                      reinterpret_cast<Object*>(map_word.value_));
}

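// The returned mode is only valid while allocation (and hence GC) is
// disallowed, which is what the DisallowHeapAllocation reference enforces.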
WriteBarrierMode HeapObjectPtr::GetWriteBarrierMode(
    const DisallowHeapAllocation& promise) {
  Heap* heap = Heap::FromWritableHeapObject(this);
  if (heap->incremental_marking()->IsMarking()) return UPDATE_WRITE_BARRIER;
  if (Heap::InNewSpace(*this)) return SKIP_WRITE_BARRIER;
  return UPDATE_WRITE_BARRIER;
}

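// Unlike NeverReadOnlySpaceObjectPtr::GetHeap(), this works for read-only
// objects as well: the owning heap is reachable through the chunk header.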
ReadOnlyRoots HeapObjectPtr::GetReadOnlyRoots() const {
  return ReadOnlyRoots(MemoryChunk::FromHeapObject(*this)->heap());
}

int HeapObjectPtr::Size() const {
  return reinterpret_cast<HeapObject*>(ptr())->Size();
}

int HeapObjectPtr::SizeFromMap(Map map) const {
  return reinterpret_cast<HeapObject*>(ptr())->SizeFromMap(map);
}

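// Raw field access: byte offsets are relative to the object start (the map
// word is at offset 0); FIELD_ADDR removes the heap-object tag internally.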
ObjectSlot HeapObjectPtr::RawField(int byte_offset) const {
  return ObjectSlot(FIELD_ADDR(this, byte_offset));
}

MaybeObjectSlot HeapObjectPtr::RawMaybeWeakField(int byte_offset) const {
  return MaybeObjectSlot(FIELD_ADDR(this, byte_offset));
}

Address HeapObjectPtr::GetFieldAddress(int field_offset) const {
  return FIELD_ADDR(this, field_offset);
}

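// These helpers are only valid for objects that can never live in read-only
// space, so the owning heap (and isolate) is always available.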
Heap* NeverReadOnlySpaceObjectPtr::GetHeap(const HeapObjectPtr object) {
  MemoryChunk* chunk = MemoryChunk::FromAddress(object.ptr());
  // Make sure we are not accessing an object in RO space.
  SLOW_DCHECK(chunk->owner()->identity() != RO_SPACE);
  Heap* heap = chunk->heap();
  SLOW_DCHECK(heap != nullptr);
  return heap;
}

Isolate* NeverReadOnlySpaceObjectPtr::GetIsolate(const HeapObjectPtr object) {
  return GetHeap(object)->isolate();
}

}  // namespace internal
}  // namespace v8

#include "src/objects/object-macros-undef.h"

#endif  // V8_OBJECTS_HEAP_OBJECT_INL_H_