#include "src/snapshot/deserializer-allocator.h"

#include "src/heap/heap-inl.h"
#include "src/snapshot/deserializer.h"
#include "src/snapshot/startup-deserializer.h"

namespace v8 {
namespace internal {

DeserializerAllocator::DeserializerAllocator(Deserializer* deserializer)
    : deserializer_(deserializer) {}
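
// The space requirements of a snapshot are known ahead of time, so the
// required memory is reserved up front and AllocateRaw() merely bumps the
// per-space high-water pointer within the current reserved chunk. Maps are
// handed out from the pre-allocated map addresses, and large objects, which
// cannot share a single reservation, are allocated individually and later
// referenced by their index in |deserialized_large_objects_|.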
Address DeserializerAllocator::AllocateRaw(AllocationSpace space, int size) {
  if (space == LO_SPACE) {
    AlwaysAllocateScope scope(isolate());
    LargeObjectSpace* lo_space = isolate()->heap()->lo_space();
    AllocationResult result = lo_space->AllocateRaw(size);
    HeapObject* obj = result.ToObjectChecked();
    deserialized_large_objects_.push_back(obj);
    return obj->address();
  } else if (space == MAP_SPACE) {
    DCHECK_EQ(Map::kSize, size);
    return allocated_maps_[next_map_index_++];
  } else {
    DCHECK_LT(space, kNumberOfPreallocatedSpaces);
    Address address = high_water_[space];
    DCHECK_NE(address, kNullAddress);
    high_water_[space] += size;
#ifdef DEBUG
    // Assert that the current reserved chunk is still big enough.
    const Heap::Reservation& reservation = reservations_[space];
    int chunk_index = current_chunk_[space];
    DCHECK_LE(high_water_[space], reservation[chunk_index].end);
#endif
    if (space == CODE_SPACE) SkipList::Update(address, size);
    return address;
  }
}
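
// Allocates |size| bytes in |space|. If a non-word alignment is pending in
// |next_alignment_|, over-allocates and pads with a filler object so that
// the returned object is suitably aligned.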
Address DeserializerAllocator::Allocate(AllocationSpace space, int size) {
  Address address;
  HeapObject* obj;

  if (next_alignment_ != kWordAligned) {
    const int reserved = size + Heap::GetMaximumFillToAlign(next_alignment_);
    address = AllocateRaw(space, reserved);
    obj = HeapObject::FromAddress(address);
    // If one of the following assertions fails, then we are deserializing an
    // aligned object when the filler maps have not been deserialized yet.
    // We require filler maps as padding to align the object.
    Heap* heap = isolate()->heap();
    DCHECK(ReadOnlyRoots(heap).free_space_map()->IsMap());
    DCHECK(ReadOnlyRoots(heap).one_pointer_filler_map()->IsMap());
    DCHECK(ReadOnlyRoots(heap).two_pointer_filler_map()->IsMap());
    obj = heap->AlignWithFiller(obj, size, reserved, next_alignment_);
    address = obj->address();
    next_alignment_ = kWordAligned;
    return address;
  } else {
    return AllocateRaw(space, size);
  }
}
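
// Advances |space| to its next reserved chunk, after checking that the
// current chunk has been used up exactly.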
void DeserializerAllocator::MoveToNextChunk(AllocationSpace space) {
  DCHECK_LT(space, kNumberOfPreallocatedSpaces);
  uint32_t chunk_index = current_chunk_[space];
  const Heap::Reservation& reservation = reservations_[space];
  // Make sure the current chunk is indeed exhausted.
  CHECK_EQ(reservation[chunk_index].end, high_water_[space]);
  // Move to the next reserved chunk.
  chunk_index = ++current_chunk_[space];
  CHECK_LT(chunk_index, reservation.size());
  high_water_[space] = reservation[chunk_index].start;
}

HeapObject* DeserializerAllocator::GetMap(uint32_t index) {
  DCHECK_LT(index, next_map_index_);
  return HeapObject::FromAddress(allocated_maps_[index]);
}

HeapObject* DeserializerAllocator::GetLargeObject(uint32_t index) {
  DCHECK_LT(index, deserialized_large_objects_.size());
  return deserialized_large_objects_[index];
}
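
// Resolves a back reference, given as a chunk index and offset within a
// preallocated space, to the address of the already-deserialized object.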
HeapObject* DeserializerAllocator::GetObject(AllocationSpace space,
                                             uint32_t chunk_index,
                                             uint32_t chunk_offset) {
  DCHECK_LT(space, kNumberOfPreallocatedSpaces);
  DCHECK_LE(chunk_index, current_chunk_[space]);
  Address address = reservations_[space][chunk_index].start + chunk_offset;
  if (next_alignment_ != kWordAligned) {
    int padding = Heap::GetFillToAlign(address, next_alignment_);
    next_alignment_ = kWordAligned;
    DCHECK(padding == 0 || HeapObject::FromAddress(address)->IsFiller());
    // Skip the alignment filler in front of the actual object.
    address += padding;
  }
  return HeapObject::FromAddress(address);
}
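
// Translates the reservation sizes recorded in the snapshot into per-space
// chunk lists; the actual start and end addresses are filled in later when
// ReserveSpace() asks the heap for the memory.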
void DeserializerAllocator::DecodeReservation(
    const std::vector<SerializedData::Reservation>& res) {
  DCHECK_EQ(0, reservations_[FIRST_SPACE].size());
  int current_space = FIRST_SPACE;
  for (auto& r : res) {
    reservations_[current_space].push_back(
        {r.chunk_size(), kNullAddress, kNullAddress});
    if (r.is_last()) current_space++;
  }
  DCHECK_EQ(kNumberOfSpaces, current_space);
  for (int i = 0; i < kNumberOfPreallocatedSpaces; i++) current_chunk_[i] = 0;
}
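
// Asks the heap to reserve the decoded chunks and to pre-allocate the map
// addresses, then initializes each space's high-water mark to the start of
// its first chunk. Returns false if the reservation cannot be fulfilled.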
bool DeserializerAllocator::ReserveSpace() {
#ifdef DEBUG
  for (int i = FIRST_SPACE; i < kNumberOfSpaces; ++i) {
    DCHECK_GT(reservations_[i].size(), 0);
  }
#endif  // DEBUG
  DCHECK(allocated_maps_.empty());
  if (!isolate()->heap()->ReserveSpace(reservations_, &allocated_maps_)) {
    return false;
  }
  for (int i = 0; i < kNumberOfPreallocatedSpaces; i++) {
    high_water_[i] = reservations_[i][0].start;
  }
  return true;
}
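
// Returns true if every reserved chunk and every pre-allocated map address
// has been consumed, i.e. the snapshot used exactly the space it reserved.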
bool DeserializerAllocator::ReservationsAreFullyUsed() const {
  for (int space = 0; space < kNumberOfPreallocatedSpaces; space++) {
    const uint32_t chunk_index = current_chunk_[space];
    if (reservations_[space].size() != chunk_index + 1) {
      return false;
    }
    if (reservations_[space][chunk_index].end != high_water_[space]) {
      return false;
    }
  }
  return (allocated_maps_.size() == next_map_index_);
}

void DeserializerAllocator::RegisterDeserializedObjectsForBlackAllocation() {
  isolate()->heap()->RegisterDeserializedObjectsForBlackAllocation(
      reservations_, deserialized_large_objects_, allocated_maps_);
}

Isolate* DeserializerAllocator::isolate() const {
  return deserializer_->isolate();
}

}  // namespace internal
}  // namespace v8