#include "src/snapshot/deserializer.h"

#include "src/assembler-inl.h"
#include "src/heap/heap-write-barrier-inl.h"
#include "src/interpreter/interpreter.h"
#include "src/isolate.h"
#include "src/objects/api-callbacks.h"
#include "src/objects/hash-table.h"
#include "src/objects/js-array-buffer-inl.h"
#include "src/objects/js-array-inl.h"
#include "src/objects/maybe-object.h"
#include "src/objects/slots.h"
#include "src/objects/smi.h"
#include "src/objects/string.h"
#include "src/snapshot/natives.h"
#include "src/snapshot/snapshot.h"

namespace v8 {
namespace internal {

// A slot that does not assume pointer alignment. Deserialized payloads may
// place tagged values at unaligned addresses (e.g. inside code objects), so
// all reads and writes go through memcpy instead of direct loads and stores.
class UnalignedSlot {
 public:
  explicit UnalignedSlot(ObjectSlot slot) : ptr_(slot.address()) {}
  explicit UnalignedSlot(Address address) : ptr_(address) {}
  explicit UnalignedSlot(MaybeObject* slot)
      : ptr_(reinterpret_cast<Address>(slot)) {}
  explicit UnalignedSlot(Object** slot)
      : ptr_(reinterpret_cast<Address>(slot)) {}

  inline bool operator<(const UnalignedSlot& other) const {
    return ptr_ < other.ptr_;
  }
  inline bool operator==(const UnalignedSlot& other) const {
    return ptr_ == other.ptr_;
  }

  inline void Advance(int bytes = kPointerSize) { ptr_ += bytes; }

  MaybeObject Read() {
    Address result;
    memcpy(&result, reinterpret_cast<void*>(ptr_), sizeof(result));
    return MaybeObject(result);
  }
  MaybeObject ReadPrevious() {
    Address result;
    memcpy(&result, reinterpret_cast<void*>(ptr_ - kPointerSize),
           sizeof(result));
    return MaybeObject(result);
  }

  inline void Write(Address value) {
    memcpy(reinterpret_cast<void*>(ptr_), &value, sizeof(value));
  }
  MaybeObjectSlot Slot() { return MaybeObjectSlot(ptr_); }

  Address address() { return ptr_; }

 private:
  Address ptr_;
};
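// Note (added commentary, not from the original file): UnalignedSlot is
// deliberately a thin wrapper around a raw Address. Where unaligned loads
// fault, or are undefined behavior for the compiler, a plain
// `*reinterpret_cast<Address*>(ptr_)` would not be safe; memcpy is the
// portable idiom, and compilers typically lower it to a single load or store
// on targets that allow unaligned access. A sketch of the intended use:
//
//   UnalignedSlot slot(code_body_address + 1);  // not pointer-aligned
//   slot.Write(some_tagged_value);              // safe: memcpy-based
//
// (The address arithmetic above is illustrative only.)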
void Deserializer::UnalignedCopy(UnalignedSlot dest, MaybeObject value) {
  DCHECK(!allocator()->next_reference_is_weak());
  dest.Write(value.ptr());
}
void Deserializer::UnalignedCopy(UnalignedSlot dest, Address value) {
  DCHECK(!allocator()->next_reference_is_weak());
  dest.Write(value);
}
void Deserializer::Initialize(Isolate* isolate) {
  DCHECK_NULL(isolate_);
  DCHECK_NOT_NULL(isolate);
  isolate_ = isolate;
  DCHECK_NULL(external_reference_table_);
  external_reference_table_ = isolate->external_reference_table();
#ifdef DEBUG
  // Count the number of external references registered through the API.
  num_api_references_ = 0;
  if (isolate_->api_external_references() != nullptr) {
    while (isolate_->api_external_references()[num_api_references_] != 0) {
      num_api_references_++;
    }
  }
#endif  // DEBUG
  CHECK_EQ(magic_number_,
           SerializedData::ComputeMagicNumber(external_reference_table_));
}
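// Note (added commentary): judging by the call above, the magic number ties a
// snapshot to the external reference table it was built against. If the set
// of external references changes, ComputeMagicNumber yields a different value
// and deserialization fails fast here, instead of producing a heap with
// dangling external pointers.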
void Deserializer::Rehash() {
  DCHECK(can_rehash() || deserializing_user_code());
  for (const auto& item : to_rehash_) item->RehashBasedOnMap(isolate());
}
Deserializer::~Deserializer() {
#ifdef DEBUG
  // Do not perform checks if we aborted deserialization.
  if (source_.position() == 0) return;
  // Check that we only have padding bytes remaining.
  while (source_.HasMore()) DCHECK_EQ(kNop, source_.Get());
  // Check that we've fully used all reserved space.
  DCHECK(allocator()->ReservationsAreFullyUsed());
#endif  // DEBUG
}
// This is called on the roots. It is the driver of the deserialization
// process. It is also called on the body of each function.
void Deserializer::VisitRootPointers(Root root, const char* description,
                                     ObjectSlot start, ObjectSlot end) {
  // The space must be new space. Any other space would cause ReadChunk to
  // try to update the remembered set using nullptr as the address.
  ReadData(UnalignedSlot(start), UnalignedSlot(end), NEW_SPACE, kNullAddress);
}
void Deserializer::Synchronize(VisitorSynchronization::SyncTag tag) {
  static const byte expected = kSynchronize;
  CHECK_EQ(expected, source_.Get());
}
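// Note (added commentary): kSynchronize bytes are emitted by the serializer
// at root-visitation boundaries, so the CHECK above verifies that serializer
// and deserializer walked the same roots in the same order; a mismatch means
// the two sides disagree about the shape of the root list.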
void Deserializer::DeserializeDeferredObjects() {
  for (int code = source_.Get(); code != kSynchronize; code = source_.Get()) {
    switch (code) {
      case kAlignmentPrefix:
      case kAlignmentPrefix + 1:
      case kAlignmentPrefix + 2: {
        int alignment = code - (SerializerDeserializer::kAlignmentPrefix - 1);
        allocator()->SetAlignment(static_cast<AllocationAlignment>(alignment));
        break;
      }
      default: {
        int space = code & kSpaceMask;
        DCHECK_LE(space, kNumberOfSpaces);
        DCHECK_EQ(code - space, kNewObject);
        HeapObject* object = GetBackReferencedObject(space);
        int size = source_.GetInt() << kPointerSizeLog2;
        Address obj_address = object->address();
        // The object body starts one word past the address: the map slot was
        // already written when the object was first allocated.
        UnalignedSlot start(obj_address + kPointerSize);
        UnalignedSlot end(obj_address + size);
        bool filled = ReadData(start, end, space, obj_address);
        CHECK(filled);
        DCHECK(CanBeDeferred(object));
        PostProcessNewObject(object, space);
      }
    }
  }
}
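// Note (added commentary): deferred objects arrive in the stream after the
// main object graph, roughly as pairs of
//
//   [kNewObject | space]  <back-reference>  <size in words>  <body data...>
//
// where the back-reference resolves to an object that was reserved earlier
// but whose body was skipped via kDeferred. This layout is a sketch inferred
// from the loop above; the serializer is the authoritative source.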
void Deserializer::LogNewObjectEvents() {
  {
    // {new_maps_} and {new_code_objects_} contain raw pointers, so there
    // should be no GC while they are being logged.
    DisallowHeapAllocation no_gc;
    LOG_CODE_EVENT(isolate_, LogCodeObjects());
  }
  LOG_CODE_EVENT(isolate_, LogCompiledFunctions());
  LogNewMapEvents();
}
void Deserializer::LogNewMapEvents() {
  DisallowHeapAllocation no_gc;
  for (Map map : new_maps()) {
    DCHECK(FLAG_trace_maps);
    LOG(isolate_, MapCreate(map));
    LOG(isolate_, MapDetails(map));
  }
}
void Deserializer::LogScriptEvents(Script* script) {
  DisallowHeapAllocation no_gc;
  LOG(isolate_,
      ScriptEvent(Logger::ScriptEventType::kDeserialize, script->id()));
  LOG(isolate_, ScriptDetails(script));
}
StringTableInsertionKey::StringTableInsertionKey(String string)
    : StringTableKey(ComputeHashField(string)), string_(string) {
  DCHECK(string->IsInternalizedString());
}
bool StringTableInsertionKey::IsMatch(Object* string) {
  // We know that all entries in a hash table had their hash keys created.
  // Use that knowledge for a fast failure path.
  if (Hash() != String::cast(string)->Hash()) return false;
  // We want to compare the content of two internalized strings here.
  return string_->SlowEquals(String::cast(string));
}
Handle<String> StringTableInsertionKey::AsHandle(Isolate* isolate) {
  return handle(string_, isolate);
}
uint32_t StringTableInsertionKey::ComputeHashField(String string) {
  // Make sure hash_field() is computed.
  string->Hash();
  return string->hash_field();
}
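// Note (added commentary): String::Hash() lazily computes and caches the hash
// in the hash field, so calling it before reading hash_field() above ensures
// the key is constructed with a valid hash rather than the "not yet computed"
// sentinel.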
HeapObject* Deserializer::PostProcessNewObject(HeapObject* obj, int space) {
  if ((FLAG_rehash_snapshot && can_rehash_) || deserializing_user_code()) {
    if (obj->IsString()) {
      // Uninitialize hash field as we need to recompute the hash.
      String string = String::cast(obj);
      string->set_hash_field(String::kEmptyHashField);
    } else if (obj->NeedsRehashing()) {
      to_rehash_.push_back(obj);
    }
  }
  if (deserializing_user_code()) {
    if (obj->IsString()) {
      String string = String::cast(obj);
      if (string->IsInternalizedString()) {
        // Canonicalize the internalized string. If it already exists in the
        // string table, set it to forward to the existing one.
        StringTableInsertionKey key(string);
        String canonical =
            StringTable::ForwardStringIfExists(isolate_, &key, string);

        if (!canonical.is_null()) return canonical;

        new_internalized_strings_.push_back(handle(string, isolate_));
        return string;
      }
    } else if (obj->IsScript()) {
      new_scripts_.push_back(handle(Script::cast(obj), isolate_));
    } else if (obj->IsAllocationSite()) {
      // Allocation sites are linked later, in
      // ObjectDeserializer::CommitPostProcessedObjects(), because linking
      // them here could touch heap roots that are not initialized yet.
      new_allocation_sites_.push_back(AllocationSite::cast(obj));
    } else {
      DCHECK(CanBeDeferred(obj));
    }
  }
  if (obj->IsScript()) {
    LogScriptEvents(Script::cast(obj));
  } else if (obj->IsCode()) {
    // We flush all code pages after deserializing the startup snapshot.
    // Hence we only remember each individual code object when deserializing
    // user code.
    if (deserializing_user_code() || space == LO_SPACE) {
      new_code_objects_.push_back(Code::cast(obj));
    }
  } else if (FLAG_trace_maps && obj->IsMap()) {
    // Keep track of all seen Maps to log them later, since they might be
    // only partially initialized at this point.
    new_maps_.push_back(Map::cast(obj));
  } else if (obj->IsAccessorInfo()) {
#ifdef USE_SIMULATOR
    accessor_infos_.push_back(AccessorInfo::cast(obj));
#endif
  } else if (obj->IsCallHandlerInfo()) {
#ifdef USE_SIMULATOR
    call_handler_infos_.push_back(CallHandlerInfo::cast(obj));
#endif
  } else if (obj->IsExternalString()) {
    if (obj->map() == ReadOnlyRoots(isolate_).native_source_string_map()) {
      ExternalOneByteString string = ExternalOneByteString::cast(obj);
      DCHECK(string->is_uncached());
      string->SetResource(
          isolate_, NativesExternalStringResource::DecodeForDeserialization(
                        string->resource()));
    } else {
      ExternalString string = ExternalString::cast(obj);
      uint32_t index = string->resource_as_uint32();
      Address address =
          static_cast<Address>(isolate_->api_external_references()[index]);
      string->set_address_as_resource(address);
      isolate_->heap()->UpdateExternalString(string, 0,
                                             string->ExternalPayloadSize());
    }
    isolate_->heap()->RegisterExternalString(String::cast(obj));
  } else if (obj->IsJSTypedArray()) {
    JSTypedArray* typed_array = JSTypedArray::cast(obj);
    CHECK_LE(typed_array->byte_offset(), Smi::kMaxValue);
    int32_t byte_offset = static_cast<int32_t>(typed_array->byte_offset());
    if (byte_offset > 0) {
      FixedTypedArrayBase elements =
          FixedTypedArrayBase::cast(typed_array->elements());
      // Must be off-heap layout.
      DCHECK(!typed_array->is_on_heap());

      void* pointer_with_offset = reinterpret_cast<void*>(
          reinterpret_cast<intptr_t>(elements->external_pointer()) +
          byte_offset);
      elements->set_external_pointer(pointer_with_offset);
    }
  } else if (obj->IsJSArrayBuffer()) {
    JSArrayBuffer* buffer = JSArrayBuffer::cast(obj);
    // Only fixup for the off-heap case.
    if (buffer->backing_store() != nullptr) {
      Smi store_index(reinterpret_cast<Address>(buffer->backing_store()));
      void* backing_store = off_heap_backing_stores_[store_index->value()];
      buffer->set_backing_store(backing_store);
      isolate_->heap()->RegisterNewArrayBuffer(buffer);
    }
  } else if (obj->IsFixedTypedArrayBase()) {
    FixedTypedArrayBase fta = FixedTypedArrayBase::cast(obj);
    // Only fixup for the off-heap case.
    if (fta->base_pointer() == Smi::kZero) {
      Smi store_index(reinterpret_cast<Address>(fta->external_pointer()));
      void* backing_store = off_heap_backing_stores_[store_index->value()];
      fta->set_external_pointer(backing_store);
    }
  } else if (obj->IsBytecodeArray()) {
    // Reset the interrupt budget and OSR state, which are not serialized
    // with meaningful values.
    BytecodeArray bytecode_array = BytecodeArray::cast(obj);
    bytecode_array->set_interrupt_budget(
        interpreter::Interpreter::InterruptBudget());
    bytecode_array->set_osr_loop_nesting_level(0);
  }

  // Check alignment.
  DCHECK_EQ(0, Heap::GetFillToAlign(obj->address(),
                                    HeapObject::RequiredAlignment(obj->map())));
  return obj;
}
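// Note (added commentary): PostProcessNewObject is the single funnel for
// every fix-up that cannot be expressed in the serialized byte stream:
// recomputing string hashes, canonicalizing internalized strings, patching
// external pointers (external strings, array buffers, typed arrays) against
// the current process's addresses, and resetting per-process interpreter
// state. Anything address- or process-dependent has to be redone here.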
HeapObject* Deserializer::GetBackReferencedObject(int space) {
  HeapObject* obj;
  switch (space) {
    case LO_SPACE:
      obj = allocator()->GetLargeObject(source_.GetInt());
      break;
    case MAP_SPACE:
      obj = allocator()->GetMap(source_.GetInt());
      break;
    case RO_SPACE: {
      uint32_t chunk_index = source_.GetInt();
      uint32_t chunk_offset = source_.GetInt();
      if (isolate()->heap()->deserialization_complete()) {
        PagedSpace* read_only_space = isolate()->heap()->read_only_space();
        Page* page = read_only_space->first_page();
        for (uint32_t i = 0; i < chunk_index; ++i) {
          page = page->next_page();
        }
        Address address = page->OffsetToAddress(chunk_offset);
        obj = HeapObject::FromAddress(address);
      } else {
        obj = allocator()->GetObject(static_cast<AllocationSpace>(space),
                                     chunk_index, chunk_offset);
      }
      break;
    }
    default: {
      uint32_t chunk_index = source_.GetInt();
      uint32_t chunk_offset = source_.GetInt();
      obj = allocator()->GetObject(static_cast<AllocationSpace>(space),
                                   chunk_index, chunk_offset);
      break;
    }
  }

  if (deserializing_user_code() && obj->IsThinString()) {
    obj = ThinString::cast(obj)->actual();
  }

  hot_objects_.Add(obj);
  DCHECK(!HasWeakHeapObjectTag(obj));
  return obj;
}
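// Note (added commentary): a back-reference does not embed an absolute
// address. For paged spaces it is a (chunk_index, chunk_offset) pair into
// the reservations made up front, so the same stream can be materialized at
// whatever addresses the allocator hands out in this process. Read-only
// space is special-cased above because, once the read-only heap is fully
// deserialized, objects are found by walking its pages rather than through
// the allocator's pending reservations.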
// This routine writes the new object into the pointer provided.
// The reason for this strange interface is that otherwise the object is
// written very late, which means the FreeSpace map is not set up by the
// time we need to use it to mark the space at the end of a page free.
void Deserializer::ReadObject(int space_number, UnalignedSlot write_back,
                              HeapObjectReferenceType reference_type) {
  const int size = source_.GetInt() << kObjectAlignmentBits;

  Address address =
      allocator()->Allocate(static_cast<AllocationSpace>(space_number), size);
  HeapObject* obj = HeapObject::FromAddress(address);

  isolate_->heap()->OnAllocationEvent(obj, size);
  UnalignedSlot current(address);
  UnalignedSlot limit(address + size);

  if (ReadData(current, limit, space_number, address)) {
    // Only post-process if the object content has not been deferred.
    obj = PostProcessNewObject(obj, space_number);
  }

  MaybeObject write_back_obj = reference_type == HeapObjectReferenceType::STRONG
                                   ? HeapObjectReference::Strong(obj)
                                   : HeapObjectReference::Weak(obj);
  UnalignedCopy(write_back, write_back_obj);
#ifdef DEBUG
  if (obj->IsCode()) {
    DCHECK(space_number == CODE_SPACE || space_number == CODE_LO_SPACE);
  } else {
    DCHECK(space_number != CODE_SPACE && space_number != CODE_LO_SPACE);
  }
#endif  // DEBUG
}
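// Note (added commentary): object sizes travel in the byte stream in words,
// not bytes. The GetInt() above is shifted left by kObjectAlignmentBits, so
// e.g. a 5-word object is encoded as the integer 5; the deferred-object path
// earlier in this file uses the same convention with kPointerSizeLog2.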
static void NoExternalReferencesCallback() {
  // The following check will trigger if a function or object template
  // with references to native functions has been deserialized from
  // snapshot, but no actual external references were provided when the
  // isolate was created.
  CHECK_WITH_MSG(false, "No external references provided via API");
}
bool Deserializer::ReadData(UnalignedSlot current, UnalignedSlot limit,
                            int source_space, Address current_object_address) {
  Isolate* const isolate = isolate_;
  // Write barrier support costs around 1% in startup time. In fact there
  // are no new space objects in current boot snapshots, so it's not needed,
  // but that may change.
  bool write_barrier_needed =
      (current_object_address != kNullAddress && source_space != NEW_SPACE &&
       source_space != CODE_SPACE);
  while (current < limit) {
    byte data = source_.Get();
    switch (data) {
#define CASE_STATEMENT(where, how, within, space_number) \
  case where + how + within + space_number:              \
    STATIC_ASSERT((where & ~kWhereMask) == 0);           \
    STATIC_ASSERT((how & ~kHowToCodeMask) == 0);         \
    STATIC_ASSERT((within & ~kWhereToPointMask) == 0);   \
    STATIC_ASSERT((space_number & ~kSpaceMask) == 0);

#define CASE_BODY(where, how, within, space_number_if_any)                   \
  current = ReadDataCase<where, how, within, space_number_if_any>(           \
      isolate, current, current_object_address, data, write_barrier_needed); \
  break;

// This generates a case and a body for the new space (which has to do extra
// write barrier handling) and handles the other spaces with fall-through
// cases and one body.
#define ALL_SPACES(where, how, within)           \
  CASE_STATEMENT(where, how, within, NEW_SPACE)  \
  CASE_BODY(where, how, within, NEW_SPACE)       \
  CASE_STATEMENT(where, how, within, OLD_SPACE)  \
  V8_FALLTHROUGH;                                \
  CASE_STATEMENT(where, how, within, CODE_SPACE) \
  V8_FALLTHROUGH;                                \
  CASE_STATEMENT(where, how, within, MAP_SPACE)  \
  V8_FALLTHROUGH;                                \
  CASE_STATEMENT(where, how, within, LO_SPACE)   \
  V8_FALLTHROUGH;                                \
  CASE_STATEMENT(where, how, within, RO_SPACE)   \
  CASE_BODY(where, how, within, kAnyOldSpace)

#define FOUR_CASES(byte_code) \
  case byte_code:             \
  case byte_code + 1:         \
  case byte_code + 2:         \
  case byte_code + 3:

#define SIXTEEN_CASES(byte_code) \
  FOUR_CASES(byte_code)          \
  FOUR_CASES(byte_code + 4)      \
  FOUR_CASES(byte_code + 8)      \
  FOUR_CASES(byte_code + 12)

#define SINGLE_CASE(where, how, within, space) \
  CASE_STATEMENT(where, how, within, space)    \
  CASE_BODY(where, how, within, space)

      // Deserialize a new object and write a pointer to it to the current
      // object.
      ALL_SPACES(kNewObject, kPlain, kStartOfObject)
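      // Note (added commentary): most opcodes in this switch pack four
      // orthogonal fields into a single byte: where the referenced object
      // comes from (kNewObject, kBackref, kRootArray, ...), how the pointer
      // is written (kPlain vs. kFromCode), where it points (start of object
      // vs. inner pointer), and the allocation space. The CASE_STATEMENT /
      // CASE_BODY macros above enumerate the valid combinations and funnel
      // them all into the templated ReadDataCase below.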
      // Deserialize a new code object and write a pointer to its first
      // instruction to the current code object.
      ALL_SPACES(kNewObject, kFromCode, kInnerPointer)
      // Find a recently deserialized object using its offset from the
      // current allocation point and write a pointer to it to the current
      // object.
      ALL_SPACES(kBackref, kPlain, kStartOfObject)
      ALL_SPACES(kBackrefWithSkip, kPlain, kStartOfObject)
#if V8_CODE_EMBEDS_OBJECT_POINTER
      // Deserialize a new object from a pointer found in code and write a
      // pointer to it to the current object. Used only on architectures
      // where code embeds object pointers directly.
      ALL_SPACES(kNewObject, kFromCode, kStartOfObject)
      // Find a recently deserialized object using its offset from the
      // current allocation point and write a pointer to it in code.
      ALL_SPACES(kBackref, kFromCode, kStartOfObject)
      ALL_SPACES(kBackrefWithSkip, kFromCode, kStartOfObject)
#endif
      // Find a recently deserialized code object using its offset from the
      // current allocation point and write a pointer to its first
      // instruction to the current code object.
      ALL_SPACES(kBackref, kFromCode, kInnerPointer)
      ALL_SPACES(kBackrefWithSkip, kFromCode, kInnerPointer)
      // Find an object in the roots array and write a pointer to it to the
      // current object.
      SINGLE_CASE(kRootArray, kPlain, kStartOfObject, 0)
#if V8_CODE_EMBEDS_OBJECT_POINTER
      // Find an object in the roots array and write a pointer to it in code.
      SINGLE_CASE(kRootArray, kFromCode, kStartOfObject, 0)
#endif
      // Find an object in the partial snapshot cache and write a pointer to
      // it to the current object.
      SINGLE_CASE(kPartialSnapshotCache, kPlain, kStartOfObject, 0)
      SINGLE_CASE(kPartialSnapshotCache, kFromCode, kStartOfObject, 0)
      SINGLE_CASE(kPartialSnapshotCache, kFromCode, kInnerPointer, 0)
      // Find an object in the read-only object cache and write a pointer to
      // it to the current object.
      SINGLE_CASE(kReadOnlyObjectCache, kPlain, kStartOfObject, 0)
      SINGLE_CASE(kReadOnlyObjectCache, kFromCode, kStartOfObject, 0)
      SINGLE_CASE(kReadOnlyObjectCache, kFromCode, kInnerPointer, 0)
      // Find an object in the attached references and write a pointer to it
      // to the current object.
      SINGLE_CASE(kAttachedReference, kPlain, kStartOfObject, 0)
      SINGLE_CASE(kAttachedReference, kFromCode, kStartOfObject, 0)
      SINGLE_CASE(kAttachedReference, kFromCode, kInnerPointer, 0)

#undef CASE_STATEMENT
#undef CASE_BODY
#undef ALL_SPACES

      // Advance the output cursor by the given number of bytes.
      case kSkip: {
        int size = source_.GetInt();
        current.Advance(size);
        break;
      }
      // Find an external reference and write a pointer to it to the current
      // object.
      case kExternalReference + kPlain + kStartOfObject:
        current =
            ReadExternalReferenceCase(kPlain, current, current_object_address);
        break;
      // Find an external reference and write a pointer to it in the current
      // code object.
      case kExternalReference + kFromCode + kStartOfObject:
        current = ReadExternalReferenceCase(kFromCode, current,
                                            current_object_address);
        break;
      case kInternalReferenceEncoded:
      case kInternalReference: {
        // An internal reference target is not encoded via skip, but by the
        // target's offset from the code entry.
        int pc_offset = source_.GetInt();
        int target_offset = source_.GetInt();
        Code code = Code::cast(HeapObject::FromAddress(current_object_address));
        DCHECK(0 <= pc_offset && pc_offset <= code->raw_instruction_size());
        DCHECK(0 <= target_offset &&
               target_offset <= code->raw_instruction_size());
        Address pc = code->entry() + pc_offset;
        Address target = code->entry() + target_offset;
        Assembler::deserialization_set_target_internal_reference_at(
            pc, target,
            data == kInternalReference ? RelocInfo::INTERNAL_REFERENCE
                                       : RelocInfo::INTERNAL_REFERENCE_ENCODED);
        break;
      }
      case kOffHeapTarget: {
        DCHECK(FLAG_embedded_builtins);
        int skip = source_.GetInt();
        int builtin_index = source_.GetInt();
        DCHECK(Builtins::IsBuiltinId(builtin_index));

        current.Advance(skip);

        CHECK_NOT_NULL(isolate->embedded_blob());
        EmbeddedData d = EmbeddedData::FromBlob();
        Address address = d.InstructionStartOfBuiltin(builtin_index);
        CHECK_NE(kNullAddress, address);

        if (RelocInfo::OffHeapTargetIsCodedSpecially()) {
          Address location_of_branch_data = current.address();
          int skip = Assembler::deserialization_special_target_size(
              location_of_branch_data);
          Assembler::deserialization_set_special_target_at(
              location_of_branch_data,
              Code::cast(HeapObject::FromAddress(current_object_address)),
              address);
          current.Advance(skip);
        } else {
          UnalignedCopy(current, address);
          current.Advance();
        }
        break;
      }
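      // Note (added commentary): kOffHeapTarget resolves a builtin's entry
      // point inside the embedded blob compiled into the binary. The target
      // address is process-specific, so it is recomputed here from the
      // builtin index on every deserialization rather than copied from the
      // snapshot.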
      case kNextChunk: {
        int space = source_.Get();
        allocator()->MoveToNextChunk(static_cast<AllocationSpace>(space));
        break;
      }
      case kDeferred: {
        // Deferred can only occur right after the heap object header.
        DCHECK_EQ(current.address(), current_object_address + kPointerSize);
        HeapObject* obj = HeapObject::FromAddress(current_object_address);
        // If the deferred object is a map, its instance type may be used
        // during deserialization. Initialize it with a temporary value.
        if (obj->IsMap()) Map::cast(obj)->set_instance_type(FILLER_TYPE);
        current = limit;
        return false;
      }
      // Deserialize raw data of variable length.
      case kVariableRawData: {
        int size_in_bytes = source_.GetInt();
        byte* raw_data_out = reinterpret_cast<byte*>(current.address());
        source_.CopyRaw(raw_data_out, size_in_bytes);
        current.Advance(size_in_bytes);
        break;
      }
      // Deserialize raw code directly into the body of the code object.
      case kVariableRawCode: {
        int size_in_bytes = source_.GetInt();
        source_.CopyRaw(
            reinterpret_cast<byte*>(current_object_address + Code::kDataStart),
            size_in_bytes);
        break;
      }
      // Write the previously deserialized value the given number of times.
      case kVariableRepeat: {
        int repeats = source_.GetInt();
        MaybeObject object = current.ReadPrevious();
        DCHECK(!Heap::InNewSpace(object));
        for (int i = 0; i < repeats; i++) {
          UnalignedCopy(current, object);
          current.Advance();
        }
        break;
      }
      case kOffHeapBackingStore: {
        int byte_length = source_.GetInt();
        byte* backing_store = static_cast<byte*>(
            isolate->array_buffer_allocator()->AllocateUninitialized(
                byte_length));
        CHECK_NOT_NULL(backing_store);
        source_.CopyRaw(backing_store, byte_length);
        off_heap_backing_stores_.push_back(backing_store);
        break;
      }
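      // Note (added commentary): the freshly allocated backing store is only
      // parked in off_heap_backing_stores_ here. JSArrayBuffer and off-heap
      // typed array objects carry an index into that vector in place of
      // their store pointer, and PostProcessNewObject swaps the index for
      // the real address once the owning object is materialized.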
      case kApiReference: {
        int skip = source_.GetInt();
        current.Advance(skip);
        uint32_t reference_id = static_cast<uint32_t>(source_.GetInt());
        Address address;
        if (isolate->api_external_references()) {
          DCHECK_WITH_MSG(
              reference_id < num_api_references_,
              "too few external references provided through the API");
          address = static_cast<Address>(
              isolate->api_external_references()[reference_id]);
        } else {
          address = reinterpret_cast<Address>(NoExternalReferencesCallback);
        }
        UnalignedCopy(current, address);
        current.Advance();
        break;
      }
      case kClearedWeakReference:
        UnalignedCopy(current, HeapObjectReference::ClearedValue(isolate_));
        current.Advance();
        break;
      case kWeakPrefix:
        DCHECK(!allocator()->next_reference_is_weak());
        allocator()->set_next_reference_is_weak(true);
        break;
      case kAlignmentPrefix:
      case kAlignmentPrefix + 1:
      case kAlignmentPrefix + 2: {
        int alignment = data - (SerializerDeserializer::kAlignmentPrefix - 1);
        allocator()->SetAlignment(static_cast<AllocationAlignment>(alignment));
        break;
      }
      // The first kNumberOfRootArrayConstants roots are guaranteed to be in
      // the old space.
      STATIC_ASSERT(
          static_cast<int>(RootIndex::kFirstImmortalImmovableRoot) == 0);
      STATIC_ASSERT(kNumberOfRootArrayConstants <=
                    static_cast<int>(RootIndex::kLastImmortalImmovableRoot));
      STATIC_ASSERT(kNumberOfRootArrayConstants == 32);
      SIXTEEN_CASES(kRootArrayConstantsWithSkip)
      SIXTEEN_CASES(kRootArrayConstantsWithSkip + 16) {
        int skip = source_.GetInt();
        current.Advance(skip);
        V8_FALLTHROUGH;
      }
      SIXTEEN_CASES(kRootArrayConstants)
      SIXTEEN_CASES(kRootArrayConstants + 16) {
        int id = data & kRootArrayConstantsMask;
        RootIndex root_index = static_cast<RootIndex>(id);
        MaybeObject object = MaybeObject::FromObject(isolate->root(root_index));
        DCHECK(!Heap::InNewSpace(object));
        UnalignedCopy(current, object);
        current.Advance();
        break;
      }
      STATIC_ASSERT(kNumberOfHotObjects == 8);
      FOUR_CASES(kHotObjectWithSkip)
      FOUR_CASES(kHotObjectWithSkip + 4) {
        int skip = source_.GetInt();
        current.Advance(skip);
        V8_FALLTHROUGH;
      }
      FOUR_CASES(kHotObject)
      FOUR_CASES(kHotObject + 4) {
        int index = data & kHotObjectMask;
        Object* hot_object = hot_objects_.Get(index);
        MaybeObject hot_maybe_object = MaybeObject::FromObject(hot_object);
        if (allocator()->GetAndClearNextReferenceIsWeak()) {
          hot_maybe_object = MaybeObject::MakeWeak(hot_maybe_object);
        }
        // Don't update the current pointer yet; it may still be needed for
        // the write barrier below.
        UnalignedCopy(current, hot_maybe_object);
        if (write_barrier_needed && Heap::InNewSpace(hot_object)) {
          GenerationalBarrier(HeapObject::FromAddress(current_object_address),
                              current.Slot(), hot_maybe_object);
        }
        current.Advance();
        break;
      }
      // Deserialize raw data of fixed length.
      STATIC_ASSERT(kNumberOfFixedRawData == 32);
      SIXTEEN_CASES(kFixedRawData)
      SIXTEEN_CASES(kFixedRawData + 16) {
        byte* raw_data_out = reinterpret_cast<byte*>(current.address());
        int size_in_bytes = (data - kFixedRawDataStart) << kPointerSizeLog2;
        source_.CopyRaw(raw_data_out, size_in_bytes);
        current.Advance(size_in_bytes);
        break;
      }
      STATIC_ASSERT(kNumberOfFixedRepeat == 16);
      SIXTEEN_CASES(kFixedRepeat) {
        int repeats = data - kFixedRepeatStart;
        MaybeObject object = current.ReadPrevious();
        DCHECK(!Heap::InNewSpace(object));
        for (int i = 0; i < repeats; i++) {
          UnalignedCopy(current, object);
          current.Advance();
        }
        break;
      }
#ifdef DEBUG
#define UNUSED_CASE(byte_code) \
  case byte_code:              \
    UNREACHABLE();
      UNUSED_SERIALIZER_BYTE_CODES(UNUSED_CASE)
#endif
#undef UNUSED_CASE

#undef SIXTEEN_CASES
#undef FOUR_CASES
#undef SINGLE_CASE
    }
  }
  CHECK_EQ(limit, current);
  return true;
}
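// Note (added commentary): ReadData's return value distinguishes its two
// exits: true means the object body was fully materialized up to `limit`,
// while false (the kDeferred path) means the body was skipped and will be
// filled in later by DeserializeDeferredObjects.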
UnalignedSlot Deserializer::ReadExternalReferenceCase(
    HowToCode how, UnalignedSlot current, Address current_object_address) {
  int skip = source_.GetInt();
  current.Advance(skip);
  uint32_t reference_id = static_cast<uint32_t>(source_.GetInt());
  Address address = external_reference_table_->address(reference_id);

  if (how == kFromCode) {
    Address location_of_branch_data = current.address();
    int skip =
        Assembler::deserialization_special_target_size(location_of_branch_data);
    Assembler::deserialization_set_special_target_at(
        location_of_branch_data,
        Code::cast(HeapObject::FromAddress(current_object_address)), address);
    current.Advance(skip);
  } else {
    UnalignedCopy(current, address);
    current.Advance();
  }
  return current;
}
template <int where, int how, int within, int space_number_if_any>
UnalignedSlot Deserializer::ReadDataCase(Isolate* isolate,
                                         UnalignedSlot current,
                                         Address current_object_address,
                                         byte data,
                                         bool write_barrier_needed) {
  bool emit_write_barrier = false;
  bool current_was_incremented = false;
  int space_number = space_number_if_any == kAnyOldSpace ? (data & kSpaceMask)
                                                         : space_number_if_any;
  HeapObjectReferenceType reference_type = HeapObjectReferenceType::STRONG;
  if (where == kNewObject && how == kPlain && within == kStartOfObject) {
    if (allocator()->GetAndClearNextReferenceIsWeak()) {
      reference_type = HeapObjectReferenceType::WEAK;
    }
    ReadObject(space_number, current, reference_type);
    emit_write_barrier = (space_number == NEW_SPACE);
  } else {
    Object* new_object = nullptr;  // May not be a real Object pointer.
    if (where == kNewObject) {
      ReadObject(space_number, UnalignedSlot(&new_object),
                 HeapObjectReferenceType::STRONG);
    } else if (where == kBackref) {
      emit_write_barrier = (space_number == NEW_SPACE);
      new_object = GetBackReferencedObject(data & kSpaceMask);
    } else if (where == kBackrefWithSkip) {
      int skip = source_.GetInt();
      current.Advance(skip);
      emit_write_barrier = (space_number == NEW_SPACE);
      new_object = GetBackReferencedObject(data & kSpaceMask);
    } else if (where == kRootArray) {
      int id = source_.GetInt();
      RootIndex root_index = static_cast<RootIndex>(id);
      new_object = isolate->root(root_index);
      emit_write_barrier = Heap::InNewSpace(new_object);
      hot_objects_.Add(HeapObject::cast(new_object));
    } else if (where == kReadOnlyObjectCache) {
      int cache_index = source_.GetInt();
      new_object = isolate->read_only_object_cache()->at(cache_index);
      DCHECK(!Heap::InNewSpace(new_object));
      emit_write_barrier = false;
    } else if (where == kPartialSnapshotCache) {
      int cache_index = source_.GetInt();
      new_object = isolate->partial_snapshot_cache()->at(cache_index);
      emit_write_barrier = Heap::InNewSpace(new_object);
    } else {
      DCHECK_EQ(where, kAttachedReference);
      int index = source_.GetInt();
      new_object = *attached_objects_[index];
      emit_write_barrier = Heap::InNewSpace(new_object);
    }
    if (within == kInnerPointer) {
      DCHECK_EQ(how, kFromCode);
      if (new_object->IsCode()) {
        new_object = reinterpret_cast<Object*>(
            Code::cast(new_object)->raw_instruction_start());
      } else {
        Cell* cell = Cell::cast(new_object);
        new_object = reinterpret_cast<Object*>(cell->ValueAddress());
      }
    }
    if (how == kFromCode) {
      DCHECK(!allocator()->next_reference_is_weak());
      Address location_of_branch_data = current.address();
      int skip = Assembler::deserialization_special_target_size(
          location_of_branch_data);
      Assembler::deserialization_set_special_target_at(
          location_of_branch_data,
          Code::cast(HeapObject::FromAddress(current_object_address)),
          reinterpret_cast<Address>(new_object));
      current.Advance(skip);
      current_was_incremented = true;
    } else {
      MaybeObject new_maybe_object = MaybeObject::FromObject(new_object);
      if (allocator()->GetAndClearNextReferenceIsWeak()) {
        new_maybe_object = MaybeObject::MakeWeak(new_maybe_object);
      }
      UnalignedCopy(current, new_maybe_object);
    }
  }
  if (emit_write_barrier && write_barrier_needed) {
    HeapObject* object = HeapObject::FromAddress(current_object_address);
    SLOW_DCHECK(isolate->heap()->Contains(object));
    GenerationalBarrier(object, current.Slot(), current.Read());
  }
  if (!current_was_incremented) {