5 #include "src/compiler/backend/register-allocator.h" 9 #include "src/assembler-inl.h" 10 #include "src/base/adapters.h" 11 #include "src/compiler/linkage.h" 12 #include "src/string-stream.h" 20 if (FLAG_trace_alloc) PrintF(__VA_ARGS__); \ 25 static constexpr
int kFloat32Bit =
26 RepresentationBit(MachineRepresentation::kFloat32);
27 static constexpr
int kSimd128Bit =
28 RepresentationBit(MachineRepresentation::kSimd128);
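// These bits are tested against InstructionSequence::representation_mask()
// (see ProcessInstructions below) so fixed live ranges for float32 and
// simd128 registers are only materialized when the code actually uses values
// of those representations.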
int GetRegisterCount(const RegisterConfiguration* cfg, RegisterKind kind) {
  return kind == FP_REGISTERS ? cfg->num_double_registers()
                              : cfg->num_general_registers();
}

int GetAllocatableRegisterCount(const RegisterConfiguration* cfg,
                                RegisterKind kind) {
  return kind == FP_REGISTERS ? cfg->num_allocatable_double_registers()
                              : cfg->num_allocatable_general_registers();
}

const int* GetAllocatableRegisterCodes(const RegisterConfiguration* cfg,
                                       RegisterKind kind) {
  return kind == FP_REGISTERS ? cfg->allocatable_double_codes()
                              : cfg->allocatable_general_codes();
}
const InstructionBlock* GetContainingLoop(const InstructionSequence* sequence,
                                          const InstructionBlock* block) {
  RpoNumber index = block->loop_header();
  if (!index.IsValid()) return nullptr;
  return sequence->InstructionBlockAt(index);
}

const InstructionBlock* GetInstructionBlock(const InstructionSequence* code,
                                            LifetimePosition pos) {
  return code->GetInstructionBlock(pos.ToInstructionIndex());
}

Instruction* GetLastInstruction(InstructionSequence* code,
                                const InstructionBlock* block) {
  return code->InstructionAt(block->last_instruction_index());
}
int GetByteWidth(MachineRepresentation rep) {
  switch (rep) {
    case MachineRepresentation::kBit:
    case MachineRepresentation::kWord8:
    case MachineRepresentation::kWord16:
    case MachineRepresentation::kWord32:
    case MachineRepresentation::kTaggedSigned:
    case MachineRepresentation::kTaggedPointer:
    case MachineRepresentation::kTagged:
    case MachineRepresentation::kFloat32:
      return kPointerSize;
    case MachineRepresentation::kWord64:
    case MachineRepresentation::kFloat64:
      return kDoubleSize;
    case MachineRepresentation::kSimd128:
      return kSimd128Size;
    case MachineRepresentation::kNone:
      break;
  }
  UNREACHABLE();
}
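// The width computed here becomes SpillRange::byte_width_;
// SpillRange::TryMerge (below) refuses to merge spill ranges of different
// widths, so values needing different spill-slot sizes never share a slot.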
class LiveRangeBound {
 public:
  explicit LiveRangeBound(LiveRange* range, bool skip)
      : range_(range), start_(range->Start()), end_(range->End()), skip_(skip) {
    DCHECK(!range->IsEmpty());
  }

  bool CanCover(LifetimePosition position) {
    return start_ <= position && position < end_;
  }

  LiveRange* const range_;
  const LifetimePosition start_;
  const LifetimePosition end_;
  const bool skip_;
};

struct FindResult {
  LiveRange* cur_cover_;
  LiveRange* pred_cover_;
};

class LiveRangeBoundArray {
 public:
  LiveRangeBoundArray() : length_(0), start_(nullptr) {}

  bool ShouldInitialize() { return start_ == nullptr; }

  void Initialize(Zone* zone, TopLevelLiveRange* range) {
    length_ = range->GetChildCount();
    start_ = zone->NewArray<LiveRangeBound>(length_);
    LiveRangeBound* curr = start_;
    for (LiveRange *i = range; i != nullptr; i = i->next(), ++curr) {
      new (curr) LiveRangeBound(i, i->spilled());
    }
  }

  LiveRangeBound* Find(const LifetimePosition position) const {
    size_t left_index = 0;
    size_t right_index = length_;
    while (true) {
      size_t current_index = left_index + (right_index - left_index) / 2;
      DCHECK(right_index > current_index);
      LiveRangeBound* bound = &start_[current_index];
      if (bound->start_ <= position) {
        if (position < bound->end_) return bound;
        DCHECK(left_index < current_index);
        left_index = current_index;
      } else {
        right_index = current_index;
      }
    }
  }

  LiveRangeBound* FindPred(const InstructionBlock* pred) {
    LifetimePosition pred_end =
        LifetimePosition::InstructionFromInstructionIndex(
            pred->last_instruction_index());
    return Find(pred_end);
  }

  LiveRangeBound* FindSucc(const InstructionBlock* succ) {
    LifetimePosition succ_start = LifetimePosition::GapFromInstructionIndex(
        succ->first_instruction_index());
    return Find(succ_start);
  }

  bool FindConnectableSubranges(const InstructionBlock* block,
                                const InstructionBlock* pred,
                                FindResult* result) const {
    LifetimePosition pred_end =
        LifetimePosition::InstructionFromInstructionIndex(
            pred->last_instruction_index());
    LiveRangeBound* bound = Find(pred_end);
    result->pred_cover_ = bound->range_;
    LifetimePosition cur_start = LifetimePosition::GapFromInstructionIndex(
        block->first_instruction_index());

    if (bound->CanCover(cur_start)) {
      // Both blocks are covered by the same range, so there is nothing to
      // connect.
      return false;
    }
    bound = Find(cur_start);
    if (bound->skip_) {
      return false;
    }
    result->cur_cover_ = bound->range_;
    DCHECK(result->pred_cover_ != nullptr && result->cur_cover_ != nullptr);
    return (result->cur_cover_ != result->pred_cover_);
  }

 private:
  size_t length_;
  LiveRangeBound* start_;
};

class LiveRangeFinder {
 public:
  explicit LiveRangeFinder(const RegisterAllocationData* data, Zone* zone)
      : data_(data),
        bounds_length_(static_cast<int>(data_->live_ranges().size())),
        bounds_(zone->NewArray<LiveRangeBoundArray>(bounds_length_)),
        zone_(zone) {
    for (int i = 0; i < bounds_length_; ++i) {
      new (&bounds_[i]) LiveRangeBoundArray();
    }
  }

  LiveRangeBoundArray* ArrayFor(int operand_index) {
    DCHECK(operand_index < bounds_length_);
    TopLevelLiveRange* range = data_->live_ranges()[operand_index];
    DCHECK(range != nullptr && !range->IsEmpty());
    LiveRangeBoundArray* array = &bounds_[operand_index];
    if (array->ShouldInitialize()) {
      array->Initialize(zone_, range);
    }
    return array;
  }

 private:
  const RegisterAllocationData* const data_;
  const int bounds_length_;
  LiveRangeBoundArray* const bounds_;
  Zone* const zone_;
};
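// Usage sketch (hypothetical numbers): for a virtual register whose live
// range was split into children covering [0,8), [8,22) and [22,40),
// ArrayFor() lazily builds three LiveRangeBounds once, and Find(@18)
// binary-searches them: the middle bound [8,22) contains @18, so that child
// is found in O(log n) instead of walking the sibling chain.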
typedef std::pair<ParallelMove*, InstructionOperand> DelayedInsertionMapKey;

struct DelayedInsertionMapCompare {
  bool operator()(const DelayedInsertionMapKey& a,
                  const DelayedInsertionMapKey& b) const {
    if (a.first == b.first) {
      return a.second.Compare(b.second);
    }
    return a.first < b.first;
  }
};

typedef ZoneMap<DelayedInsertionMapKey, InstructionOperand,
                DelayedInsertionMapCompare>
    DelayedInsertionMap;

}  // namespace
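// Presumably consumed by the live-range connection phase (not part of this
// excerpt): keying on (gap move site, destination operand) lets delayed move
// insertions be grouped per parallel move and deduplicated by destination
// before they are finally materialized.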
UsePosition::UsePosition(LifetimePosition pos, InstructionOperand* operand,
                         void* hint, UsePositionHintType hint_type)
    : operand_(operand), hint_(hint), next_(nullptr), pos_(pos), flags_(0) {
  DCHECK_IMPLIES(hint == nullptr, hint_type == UsePositionHintType::kNone);
  bool register_beneficial = true;
  UsePositionType type = UsePositionType::kRegisterOrSlot;
  if (operand_ != nullptr && operand_->IsUnallocated()) {
    const UnallocatedOperand* unalloc = UnallocatedOperand::cast(operand_);
    if (unalloc->HasRegisterPolicy()) {
      type = UsePositionType::kRequiresRegister;
    } else if (unalloc->HasSlotPolicy()) {
      type = UsePositionType::kRequiresSlot;
      register_beneficial = false;
    } else if (unalloc->HasRegisterOrSlotOrConstantPolicy()) {
      type = UsePositionType::kRegisterOrSlotOrConstant;
      register_beneficial = false;
    } else {
      register_beneficial = !unalloc->HasRegisterOrSlotPolicy();
    }
  }
  flags_ = TypeField::encode(type) | HintTypeField::encode(hint_type) |
           RegisterBeneficialField::encode(register_beneficial) |
           AssignedRegisterField::encode(kUnassignedRegister);
  DCHECK(pos_.IsValid());
}
bool UsePosition::HasHint() const {
  int hint_register;
  return HintRegister(&hint_register);
}

bool UsePosition::HintRegister(int* register_code) const {
  if (hint_ == nullptr) return false;
  switch (HintTypeField::decode(flags_)) {
    case UsePositionHintType::kNone:
    case UsePositionHintType::kUnresolved:
      return false;
    case UsePositionHintType::kUsePos: {
      UsePosition* use_pos = reinterpret_cast<UsePosition*>(hint_);
      int assigned_register = AssignedRegisterField::decode(use_pos->flags_);
      if (assigned_register == kUnassignedRegister) return false;
      *register_code = assigned_register;
      return true;
    }
    case UsePositionHintType::kOperand: {
      InstructionOperand* operand =
          reinterpret_cast<InstructionOperand*>(hint_);
      *register_code = LocationOperand::cast(operand)->register_code();
      return true;
    }
    case UsePositionHintType::kPhi: {
      RegisterAllocationData::PhiMapValue* phi =
          reinterpret_cast<RegisterAllocationData::PhiMapValue*>(hint_);
      int assigned_register = phi->assigned_register();
      if (assigned_register == kUnassignedRegister) return false;
      *register_code = assigned_register;
      return true;
    }
  }
  UNREACHABLE();
}
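// Hint chain sketch: a use whose hint_ points at an earlier UsePosition that
// ended up in register 3 (hypothetical code) makes HintRegister() report 3,
// letting the allocator prefer that register for this range; an unresolved or
// still-unassigned hint simply reports no preference.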
UsePositionHintType UsePosition::HintTypeForOperand(
    const InstructionOperand& op) {
  switch (op.kind()) {
    case InstructionOperand::CONSTANT:
    case InstructionOperand::IMMEDIATE:
    case InstructionOperand::EXPLICIT:
      return UsePositionHintType::kNone;
    case InstructionOperand::UNALLOCATED:
      return UsePositionHintType::kUnresolved;
    case InstructionOperand::ALLOCATED:
      if (op.IsRegister() || op.IsFPRegister()) {
        return UsePositionHintType::kOperand;
      } else {
        DCHECK(op.IsStackSlot() || op.IsFPStackSlot());
        return UsePositionHintType::kNone;
      }
    case InstructionOperand::INVALID:
      break;
  }
  UNREACHABLE();
}
void UsePosition::SetHint(UsePosition* use_pos) {
  DCHECK_NOT_NULL(use_pos);
  hint_ = use_pos;
  flags_ = HintTypeField::update(flags_, UsePositionHintType::kUsePos);
}

void UsePosition::ResolveHint(UsePosition* use_pos) {
  DCHECK_NOT_NULL(use_pos);
  if (HintTypeField::decode(flags_) != UsePositionHintType::kUnresolved) return;
  hint_ = use_pos;
  flags_ = HintTypeField::update(flags_, UsePositionHintType::kUsePos);
}

void UsePosition::set_type(UsePositionType type, bool register_beneficial) {
  DCHECK_IMPLIES(type == UsePositionType::kRequiresSlot, !register_beneficial);
  DCHECK_EQ(kUnassignedRegister, AssignedRegisterField::decode(flags_));
  flags_ = TypeField::encode(type) |
           RegisterBeneficialField::encode(register_beneficial) |
           HintTypeField::encode(HintTypeField::decode(flags_)) |
           AssignedRegisterField::encode(kUnassignedRegister);
}
UseInterval* UseInterval::SplitAt(LifetimePosition pos, Zone* zone) {
  DCHECK(Contains(pos) && pos != start());
  UseInterval* after = new (zone) UseInterval(pos, end_);
  after->next_ = next_;
  next_ = nullptr;
  end_ = pos;
  return after;
}
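// For example, splitting the interval [8, 20) at @12 shrinks this interval to
// [8, 12) and returns a new zone-allocated interval [12, 20) linked in right
// after it, preserving the order of the chain.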
void LifetimePosition::Print() const { StdoutStream{} << *this << std::endl; }

std::ostream& operator<<(std::ostream& os, const LifetimePosition pos) {
  os << '@' << pos.ToInstructionIndex();
  if (pos.IsGapPosition()) {
    os << 'g';
  } else {
    os << 'i';
  }
  if (pos.IsStart()) {
    os << 's';
  } else {
    os << 'e';
  }
  return os;
}
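// Each instruction index expands to gap and instruction positions with start
// and end halves, so the printed form combines the instruction index with a
// gap/instruction marker and a start/end marker, e.g. "@7is" for the start
// half of instruction 7 (assuming the encoding declared in
// register-allocator.h, which is not part of this file).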
LiveRange::LiveRange(int relative_id, MachineRepresentation rep,
                     TopLevelLiveRange* top_level)
    : relative_id_(relative_id),
      bits_(0),
      last_interval_(nullptr),
      first_interval_(nullptr),
      first_pos_(nullptr),
      top_level_(top_level),
      next_(nullptr),
      current_interval_(nullptr),
      last_processed_use_(nullptr),
      current_hint_position_(nullptr),
      splitting_pointer_(nullptr) {
  DCHECK(AllocatedOperand::IsSupportedRepresentation(rep));
  bits_ = AssignedRegisterField::encode(kUnassignedRegister) |
          RepresentationField::encode(rep);
}
void LiveRange::VerifyPositions() const {
  UseInterval* interval = first_interval_;
  for (UsePosition* pos = first_pos_; pos != nullptr; pos = pos->next()) {
    CHECK(Start() <= pos->pos());
    CHECK(pos->pos() <= End());
    CHECK_NOT_NULL(interval);
    while (!interval->Contains(pos->pos()) && interval->end() != pos->pos()) {
      interval = interval->next();
      CHECK_NOT_NULL(interval);
    }
  }
}

void LiveRange::VerifyIntervals() const {
  DCHECK(first_interval()->start() == Start());
  LifetimePosition last_end = first_interval()->end();
  for (UseInterval* interval = first_interval()->next(); interval != nullptr;
       interval = interval->next()) {
    DCHECK(last_end <= interval->start());
    last_end = interval->end();
  }
  DCHECK(last_end == End());
}
void LiveRange::set_assigned_register(int reg) {
  DCHECK(!HasRegisterAssigned() && !spilled());
  bits_ = AssignedRegisterField::update(bits_, reg);
}

void LiveRange::UnsetAssignedRegister() {
  DCHECK(HasRegisterAssigned() && !spilled());
  bits_ = AssignedRegisterField::update(bits_, kUnassignedRegister);
}

void LiveRange::Spill() {
  DCHECK(!spilled());
  DCHECK(!TopLevel()->HasNoSpillType());
  set_spilled(true);
  bits_ = AssignedRegisterField::update(bits_, kUnassignedRegister);
}

RegisterKind LiveRange::kind() const {
  return IsFloatingPoint(representation()) ? FP_REGISTERS : GENERAL_REGISTERS;
}
UsePosition* LiveRange::FirstHintPosition(int* register_index) const {
  for (UsePosition* pos = first_pos_; pos != nullptr; pos = pos->next()) {
    if (pos->HintRegister(register_index)) return pos;
  }
  return nullptr;
}

UsePosition* LiveRange::NextUsePosition(LifetimePosition start) const {
  UsePosition* use_pos = last_processed_use_;
  if (use_pos == nullptr || use_pos->pos() > start) {
    use_pos = first_pos();
  }
  while (use_pos != nullptr && use_pos->pos() < start) {
    use_pos = use_pos->next();
  }
  last_processed_use_ = use_pos;
  return use_pos;
}

UsePosition* LiveRange::NextUsePositionRegisterIsBeneficial(
    LifetimePosition start) const {
  UsePosition* pos = NextUsePosition(start);
  while (pos != nullptr && !pos->RegisterIsBeneficial()) {
    pos = pos->next();
  }
  return pos;
}

LifetimePosition LiveRange::NextLifetimePositionRegisterIsBeneficial(
    const LifetimePosition& start) const {
  UsePosition* next_use = NextUsePositionRegisterIsBeneficial(start);
  if (next_use == nullptr) return End();
  return next_use->pos();
}

UsePosition* LiveRange::PreviousUsePositionRegisterIsBeneficial(
    LifetimePosition start) const {
  UsePosition* pos = first_pos();
  UsePosition* prev = nullptr;
  while (pos != nullptr && pos->pos() < start) {
    if (pos->RegisterIsBeneficial()) prev = pos;
    pos = pos->next();
  }
  return prev;
}

UsePosition* LiveRange::NextRegisterPosition(LifetimePosition start) const {
  UsePosition* pos = NextUsePosition(start);
  while (pos != nullptr && pos->type() != UsePositionType::kRequiresRegister) {
    pos = pos->next();
  }
  return pos;
}

UsePosition* LiveRange::NextSlotPosition(LifetimePosition start) const {
  for (UsePosition* pos = NextUsePosition(start); pos != nullptr;
       pos = pos->next()) {
    if (pos->type() != UsePositionType::kRequiresSlot) continue;
    return pos;
  }
  return nullptr;
}

bool LiveRange::CanBeSpilled(LifetimePosition pos) const {
  // We cannot spill a live range that has a use requiring a register
  // at the current or the immediate next position.
  UsePosition* use_pos = NextRegisterPosition(pos);
  if (use_pos == nullptr) return true;
  return use_pos->pos() > pos.NextStart().End();
}
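// Put differently (a paraphrase, not original commentary): spilling at `pos`
// is allowed only when the next use that requires a register lies beyond the
// end of the position following `pos`, leaving room to schedule a reload
// before that use.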
bool LiveRange::IsTopLevel() const { return top_level_ == this; }

InstructionOperand LiveRange::GetAssignedOperand() const {
  if (HasRegisterAssigned()) {
    DCHECK(!spilled());
    return AllocatedOperand(LocationOperand::REGISTER, representation(),
                            assigned_register());
  }
  DCHECK(spilled());
  DCHECK(!HasRegisterAssigned());
  if (TopLevel()->HasSpillOperand()) {
    InstructionOperand* op = TopLevel()->GetSpillOperand();
    DCHECK(!op->IsUnallocated());
    return *op;
  }
  return TopLevel()->GetSpillRangeOperand();
}

UseInterval* LiveRange::FirstSearchIntervalForPosition(
    LifetimePosition position) const {
  if (current_interval_ == nullptr) return first_interval_;
  if (current_interval_->start() > position) {
    current_interval_ = nullptr;
    return first_interval_;
  }
  return current_interval_;
}
void LiveRange::AdvanceLastProcessedMarker(
    UseInterval* to_start_of, LifetimePosition but_not_past) const {
  if (to_start_of == nullptr) return;
  if (to_start_of->start() > but_not_past) return;
  LifetimePosition start = current_interval_ == nullptr
                               ? LifetimePosition::Invalid()
                               : current_interval_->start();
  if (to_start_of->start() > start) {
    current_interval_ = to_start_of;
  }
}

LiveRange* LiveRange::SplitAt(LifetimePosition position, Zone* zone) {
  int new_id = TopLevel()->GetNextChildId();
  LiveRange* child = new (zone) LiveRange(new_id, representation(), TopLevel());
  DetachAt(position, child, zone, DoNotConnectHints);

  child->top_level_ = TopLevel();
  child->next_ = next_;
  next_ = child;
  return child;
}
UsePosition* LiveRange::DetachAt(LifetimePosition position, LiveRange* result,
                                 Zone* zone,
                                 HintConnectionOption connect_hints) {
  DCHECK(Start() < position);
  DCHECK(End() > position);
  DCHECK(result->IsEmpty());
  // Find the last interval that ends before the position. If the
  // position is contained in one of the intervals in the chain, we
  // split that interval and use the first part.
  UseInterval* current = FirstSearchIntervalForPosition(position);

  // If the split position coincides with the beginning of a use interval
  // we need to split use positions in a special way.
  bool split_at_start = false;
  if (current->start() == position) {
    // When splitting at start we need to locate the previous use interval.
    current = first_interval_;
  }

  UseInterval* after = nullptr;
  while (current != nullptr) {
    if (current->Contains(position)) {
      after = current->SplitAt(position, zone);
      break;
    }
    UseInterval* next = current->next();
    if (next->start() >= position) {
      split_at_start = (next->start() == position);
      after = next;
      current->set_next(nullptr);
      break;
    }
    current = next;
  }
  DCHECK_NOT_NULL(after);

  // Partition original use intervals to the two live ranges.
  UseInterval* before = current;
  result->last_interval_ =
      (last_interval_ == before)
          ? after            // Only interval in the range after split.
          : last_interval_;  // Last interval of the original range.
  result->first_interval_ = after;
  last_interval_ = before;

  // Find the last use position before the split and the first use
  // position after it.
  UsePosition* use_after =
      splitting_pointer_ == nullptr || splitting_pointer_->pos() > position
          ? first_pos()
          : splitting_pointer_;
  UsePosition* use_before = nullptr;
  if (split_at_start) {
    // The split position coincides with the beginning of a use interval
    // (the end of a lifetime hole), so uses at exactly this position belong
    // to the split child, which owns the interval covering them.
    while (use_after != nullptr && use_after->pos() < position) {
      use_before = use_after;
      use_after = use_after->next();
    }
  } else {
    while (use_after != nullptr && use_after->pos() <= position) {
      use_before = use_after;
      use_after = use_after->next();
    }
  }

  // Partition original use positions to the two live ranges.
  if (use_before != nullptr) {
    use_before->set_next(nullptr);
  } else {
    first_pos_ = nullptr;
  }
  result->first_pos_ = use_after;

  // Discard cached iteration state. It might be pointing
  // to the use that no longer belongs to this live range.
  last_processed_use_ = nullptr;
  current_interval_ = nullptr;

  if (connect_hints == ConnectHints && use_before != nullptr &&
      use_after != nullptr) {
    use_after->SetHint(use_before);
  }
#ifdef DEBUG
  VerifyChildStructure();
  result->VerifyChildStructure();
#endif
  return use_after;
}
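// DetachAt in pictures (hypothetical positions): detaching a range covering
// [2, 30) at @14 leaves this range owning the intervals and uses before @14
// and moves everything at or after @14 into `result`. When @14 falls exactly
// on an interval start (the end of a lifetime hole), split_at_start makes a
// use at @14 travel with `result`, matching the interval that covers it.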
void LiveRange::UpdateParentForAllChildren(TopLevelLiveRange* new_top_level) {
  LiveRange* child = this;
  for (; child != nullptr; child = child->next()) {
    child->top_level_ = new_top_level;
  }
}

void LiveRange::ConvertUsesToOperand(const InstructionOperand& op,
                                     const InstructionOperand& spill_op) {
  for (UsePosition* pos = first_pos(); pos != nullptr; pos = pos->next()) {
    DCHECK(Start() <= pos->pos() && pos->pos() <= End());
    if (!pos->HasOperand()) continue;
    switch (pos->type()) {
      case UsePositionType::kRequiresSlot:
        DCHECK(spill_op.IsStackSlot() || spill_op.IsFPStackSlot());
        InstructionOperand::ReplaceWith(pos->operand(), &spill_op);
        break;
      case UsePositionType::kRequiresRegister:
        DCHECK(op.IsRegister() || op.IsFPRegister());
        V8_FALLTHROUGH;
      case UsePositionType::kRegisterOrSlot:
      case UsePositionType::kRegisterOrSlotOrConstant:
        InstructionOperand::ReplaceWith(pos->operand(), &op);
        break;
    }
  }
}
bool LiveRange::ShouldBeAllocatedBefore(const LiveRange* other) const {
  LifetimePosition start = Start();
  LifetimePosition other_start = other->Start();
  if (start == other_start) {
    UsePosition* pos = first_pos();
    if (pos == nullptr) return false;
    UsePosition* other_pos = other->first_pos();
    if (other_pos == nullptr) return true;
    return pos->pos() < other_pos->pos();
  }
  return start < other_start;
}

void LiveRange::SetUseHints(int register_index) {
  for (UsePosition* pos = first_pos(); pos != nullptr; pos = pos->next()) {
    if (!pos->HasOperand()) continue;
    switch (pos->type()) {
      case UsePositionType::kRequiresSlot:
        break;
      case UsePositionType::kRequiresRegister:
      case UsePositionType::kRegisterOrSlot:
      case UsePositionType::kRegisterOrSlotOrConstant:
        pos->set_assigned_register(register_index);
        break;
    }
  }
}

bool LiveRange::CanCover(LifetimePosition position) const {
  if (IsEmpty()) return false;
  return Start() <= position && position < End();
}
bool LiveRange::Covers(LifetimePosition position) const {
  if (!CanCover(position)) return false;
  UseInterval* start_search = FirstSearchIntervalForPosition(position);
  for (UseInterval* interval = start_search; interval != nullptr;
       interval = interval->next()) {
    DCHECK(interval->next() == nullptr ||
           interval->next()->start() >= interval->start());
    AdvanceLastProcessedMarker(interval, position);
    if (interval->Contains(position)) return true;
    if (interval->start() > position) return false;
  }
  return false;
}

LifetimePosition LiveRange::FirstIntersection(LiveRange* other) const {
  UseInterval* b = other->first_interval();
  if (b == nullptr) return LifetimePosition::Invalid();
  LifetimePosition advance_last_processed_up_to = b->start();
  UseInterval* a = FirstSearchIntervalForPosition(b->start());
  while (a != nullptr && b != nullptr) {
    if (a->start() > other->End()) break;
    if (b->start() > End()) break;
    LifetimePosition cur_intersection = a->Intersect(b);
    if (cur_intersection.IsValid()) {
      return cur_intersection;
    }
    if (a->start() < b->start()) {
      a = a->next();
      if (a == nullptr || a->start() > other->End()) break;
      AdvanceLastProcessedMarker(a, advance_last_processed_up_to);
    } else {
      b = b->next();
    }
  }
  return LifetimePosition::Invalid();
}
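// This is a classic two-pointer sweep over both sorted interval lists,
// advancing whichever interval starts earlier and stopping at the first
// overlap; together with the current_interval_ cache it stays roughly linear
// even when called repeatedly with increasing positions.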
void LiveRange::Print(const RegisterConfiguration* config,
                      bool with_children) const {
  StdoutStream os;
  PrintableLiveRange wrapper;
  wrapper.register_configuration_ = config;
  for (const LiveRange* i = this; i != nullptr; i = i->next()) {
    wrapper.range_ = i;
    os << wrapper << std::endl;
    if (!with_children) break;
  }
}

void LiveRange::Print(bool with_children) const {
  Print(RegisterConfiguration::Default(), with_children);
}
struct TopLevelLiveRange::SpillMoveInsertionList : ZoneObject {
  SpillMoveInsertionList(int gap_index, InstructionOperand* operand,
                         SpillMoveInsertionList* next)
      : gap_index(gap_index), operand(operand), next(next) {}
  const int gap_index;
  InstructionOperand* const operand;
  SpillMoveInsertionList* const next;
};
TopLevelLiveRange::TopLevelLiveRange(int vreg, MachineRepresentation rep)
    : LiveRange(0, rep, this),
      vreg_(vreg),
      last_child_id_(0),
      splintered_from_(nullptr),
      spill_operand_(nullptr),
      spill_move_insertion_locations_(nullptr),
      spilled_in_deferred_blocks_(false),
      spill_start_index_(kMaxInt),
      last_pos_(nullptr),
      splinter_(nullptr),
      has_preassigned_slot_(false) {
  bits_ |= SpillTypeField::encode(SpillType::kNoSpillType);
}

#if DEBUG
int TopLevelLiveRange::debug_virt_reg() const {
  return IsSplinter() ? splintered_from()->vreg() : vreg();
}
#endif
void TopLevelLiveRange::RecordSpillLocation(Zone* zone, int gap_index,
                                            InstructionOperand* operand) {
  DCHECK(HasNoSpillType());
  spill_move_insertion_locations_ = new (zone) SpillMoveInsertionList(
      gap_index, operand, spill_move_insertion_locations_);
}

void TopLevelLiveRange::CommitSpillMoves(InstructionSequence* sequence,
                                         const InstructionOperand& op,
                                         bool might_be_duplicated) {
  DCHECK_IMPLIES(op.IsConstant(), GetSpillMoveInsertionLocations() == nullptr);
  Zone* zone = sequence->zone();

  for (SpillMoveInsertionList* to_spill = GetSpillMoveInsertionLocations();
       to_spill != nullptr; to_spill = to_spill->next) {
    Instruction* instr = sequence->InstructionAt(to_spill->gap_index);
    ParallelMove* move =
        instr->GetOrCreateParallelMove(Instruction::START, zone);
    // Skip insertion if it's possible that the move exists already as a
    // constraint move from a fixed output register to a slot.
    if (might_be_duplicated || has_preassigned_slot()) {
      bool found = false;
      for (MoveOperands* move_op : *move) {
        if (move_op->IsEliminated()) continue;
        if (move_op->source().Equals(*to_spill->operand) &&
            move_op->destination().Equals(op)) {
          found = true;
          if (has_preassigned_slot()) move_op->Eliminate();
          break;
        }
      }
      if (found) continue;
    }
    if (!has_preassigned_slot()) {
      move->AddMove(*to_spill->operand, op);
    }
  }
}
void TopLevelLiveRange::SetSpillOperand(InstructionOperand* operand) {
  DCHECK(HasNoSpillType());
  DCHECK(!operand->IsUnallocated() && !operand->IsImmediate());
  set_spill_type(SpillType::kSpillOperand);
  spill_operand_ = operand;
}

void TopLevelLiveRange::SetSpillRange(SpillRange* spill_range) {
  DCHECK(!HasSpillOperand());
  DCHECK(spill_range);
  spill_range_ = spill_range;
}

AllocatedOperand TopLevelLiveRange::GetSpillRangeOperand() const {
  SpillRange* spill_range = GetSpillRange();
  int index = spill_range->assigned_slot();
  return AllocatedOperand(LocationOperand::STACK_SLOT, representation(), index);
}
void TopLevelLiveRange::Splinter(LifetimePosition start, LifetimePosition end,
                                 Zone* zone) {
  DCHECK(start != Start() || end != End());

  TopLevelLiveRange splinter_temp(-1, representation());
  UsePosition* last_in_splinter = nullptr;
  // Live ranges defined in deferred blocks stay in deferred blocks, so we
  // don't need to splinter them. That means that start should always be
  // after the beginning of the range.
  DCHECK(start > Start());

  if (end >= End()) {
    DCHECK(start > Start());
    DetachAt(start, &splinter_temp, zone, ConnectHints);
    next_ = nullptr;
  } else {
    DCHECK(start < End() && Start() < end);

    const int kInvalidId = std::numeric_limits<int>::max();

    UsePosition* last = DetachAt(start, &splinter_temp, zone, ConnectHints);

    LiveRange end_part(kInvalidId, this->representation(), nullptr);
    last_in_splinter =
        splinter_temp.DetachAt(end, &end_part, zone, DoNotConnectHints);

    next_ = end_part.next_;
    last_interval_->set_next(end_part.first_interval_);
    // The next splinter will happen either at or after the current interval.
    // We can optimize DetachAt by setting current_interval to last_interval_.
    current_interval_ = last_interval_;
    last_interval_ = end_part.last_interval_;

    if (first_pos_ == nullptr) {
      first_pos_ = end_part.first_pos_;
    } else {
      splitting_pointer_ = last;
      if (last != nullptr) last->set_next(end_part.first_pos_);
    }
  }

  if (splinter()->IsEmpty()) {
    splinter()->first_interval_ = splinter_temp.first_interval_;
    splinter()->last_interval_ = splinter_temp.last_interval_;
  } else {
    splinter()->last_interval_->set_next(splinter_temp.first_interval_);
    splinter()->last_interval_ = splinter_temp.last_interval_;
  }
  if (splinter()->first_pos() == nullptr) {
    splinter()->first_pos_ = splinter_temp.first_pos_;
  } else {
    splinter()->last_pos_->set_next(splinter_temp.first_pos_);
  }
  if (last_in_splinter != nullptr) {
    splinter()->last_pos_ = last_in_splinter;
  } else {
    if (splinter()->first_pos() != nullptr &&
        splinter()->last_pos_ == nullptr) {
      splinter()->last_pos_ = splinter()->first_pos();
      for (UsePosition* pos = splinter()->first_pos(); pos != nullptr;
           pos = pos->next()) {
        splinter()->last_pos_ = pos;
      }
    }
  }
#if DEBUG
  Verify();
  splinter()->Verify();
#endif
}
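// Splintering carves the [start, end) slice of a range (typically the part
// that lies in deferred code) out into splinter(), so allocation decisions on
// the cold path do not constrain the hot path; note the deliberate
// DoNotConnectHints where the detached tail re-enters non-deferred code.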
void TopLevelLiveRange::SetSplinteredFrom(TopLevelLiveRange* splinter_parent) {
  splintered_from_ = splinter_parent;
  if (!HasSpillOperand() && splinter_parent->spill_range_ != nullptr) {
    SetSpillRange(splinter_parent->spill_range_);
  }
}
void TopLevelLiveRange::UpdateSpillRangePostMerge(TopLevelLiveRange* merged) {
  DCHECK(merged->TopLevel() == this);

  if (HasNoSpillType() && merged->HasSpillRange()) {
    set_spill_type(merged->spill_type());
    DCHECK_LT(0, GetSpillRange()->live_ranges().size());
    merged->spill_range_ = nullptr;
    merged->bits_ =
        SpillTypeField::update(merged->bits_, SpillType::kNoSpillType);
  }
}

void TopLevelLiveRange::Merge(TopLevelLiveRange* other, Zone* zone) {
  DCHECK(Start() < other->Start());
  DCHECK(other->splintered_from() == this);

  LiveRange* first = this;
  LiveRange* second = other;
  DCHECK(first->Start() < second->Start());
  while (first != nullptr && second != nullptr) {
    DCHECK(first != second);
    // Make sure the ranges are in order each time we iterate.
    if (second->Start() < first->Start()) {
      LiveRange* tmp = second;
      second = first;
      first = tmp;
      continue;
    }

    if (first->End() <= second->Start()) {
      if (first->next() == nullptr ||
          first->next()->Start() > second->Start()) {
        // First is in order before second.
        LiveRange* temp = first->next();
        first->next_ = second;
        first = temp;
      } else {
        // First is in order before its successor (or second), so advance first.
        first = first->next();
      }
      continue;
    }

    DCHECK(first->Start() < second->Start());

    // If first and second intersect, split first so that the part after
    // second's start can be re-examined against second.
    if (first->Start() < second->End() && second->Start() < first->End()) {
      LiveRange* temp = first->SplitAt(second->Start(), zone);
      CHECK(temp != first);
      temp->set_spilled(first->spilled());
      if (!temp->spilled())
        temp->set_assigned_register(first->assigned_register());

      first->next_ = second;
      first = temp;
      continue;
    }
    DCHECK(first->End() <= second->Start());
  }

  TopLevel()->UpdateParentForAllChildren(TopLevel());
  TopLevel()->UpdateSpillRangePostMerge(other);
  TopLevel()->set_has_slot_use(TopLevel()->has_slot_use() ||
                               other->has_slot_use());

#if DEBUG
  Verify();
#endif
}
void TopLevelLiveRange::VerifyChildrenInOrder() const {
  LifetimePosition last_end = End();
  for (const LiveRange* child = this->next(); child != nullptr;
       child = child->next()) {
    DCHECK(last_end <= child->Start());
    last_end = child->End();
  }
}

void TopLevelLiveRange::Verify() const {
  VerifyChildrenInOrder();
  for (const LiveRange* child = this; child != nullptr; child = child->next()) {
    VerifyChildStructure();
  }
}

void TopLevelLiveRange::ShortenTo(LifetimePosition start) {
  TRACE("Shorten live range %d to [%d\n", vreg(), start.value());
  DCHECK_NOT_NULL(first_interval_);
  DCHECK(first_interval_->start() <= start);
  DCHECK(start < first_interval_->end());
  first_interval_->set_start(start);
}
void TopLevelLiveRange::EnsureInterval(LifetimePosition start,
                                       LifetimePosition end, Zone* zone) {
  TRACE("Ensure live range %d in interval [%d %d[\n", vreg(), start.value(),
        end.value());
  LifetimePosition new_end = end;
  while (first_interval_ != nullptr && first_interval_->start() <= end) {
    if (first_interval_->end() > end) {
      new_end = first_interval_->end();
    }
    first_interval_ = first_interval_->next();
  }

  UseInterval* new_interval = new (zone) UseInterval(start, new_end);
  new_interval->set_next(first_interval_);
  first_interval_ = new_interval;
  if (new_interval->next() == nullptr) {
    last_interval_ = new_interval;
  }
}

void TopLevelLiveRange::AddUseInterval(LifetimePosition start,
                                       LifetimePosition end, Zone* zone) {
  TRACE("Add to live range %d interval [%d %d[\n", vreg(), start.value(),
        end.value());
  if (first_interval_ == nullptr) {
    UseInterval* interval = new (zone) UseInterval(start, end);
    first_interval_ = interval;
    last_interval_ = interval;
  } else {
    if (end == first_interval_->start()) {
      first_interval_->set_start(start);
    } else if (end < first_interval_->start()) {
      UseInterval* interval = new (zone) UseInterval(start, end);
      interval->set_next(first_interval_);
      first_interval_ = interval;
    } else {
      // Order of instruction's processing (see ProcessInstructions) guarantees
      // that each new use interval either precedes or intersects with
      // last added interval.
      DCHECK(start <= first_interval_->end());
      first_interval_->set_start(Min(start, first_interval_->start()));
      first_interval_->set_end(Max(end, first_interval_->end()));
    }
  }
}
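// Intervals arrive while blocks and instructions are walked backwards, so a
// new interval can only precede, touch, or overlap the current first
// interval; e.g. adding [12, 16) when the first interval is [16, 28) simply
// widens it to [12, 28) without allocating a new node.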
void TopLevelLiveRange::AddUsePosition(UsePosition* use_pos) {
  LifetimePosition pos = use_pos->pos();
  TRACE("Add to live range %d use position %d\n", vreg(), pos.value());
  UsePosition* prev_hint = nullptr;
  UsePosition* prev = nullptr;
  UsePosition* current = first_pos_;
  while (current != nullptr && current->pos() < pos) {
    prev_hint = current->HasHint() ? current : prev_hint;
    prev = current;
    current = current->next();
  }

  if (prev == nullptr) {
    use_pos->set_next(first_pos_);
    first_pos_ = use_pos;
  } else {
    use_pos->set_next(prev->next());
    prev->set_next(use_pos);
  }

  if (prev_hint == nullptr && use_pos->HasHint()) {
    current_hint_position_ = use_pos;
  }
}
static bool AreUseIntervalsIntersecting(UseInterval* interval1,
                                        UseInterval* interval2) {
  while (interval1 != nullptr && interval2 != nullptr) {
    if (interval1->start() < interval2->start()) {
      if (interval1->end() > interval2->start()) {
        return true;
      }
      interval1 = interval1->next();
    } else {
      if (interval2->end() > interval1->start()) {
        return true;
      }
      interval2 = interval2->next();
    }
  }
  return false;
}
std::ostream& operator<<(std::ostream& os,
                         const PrintableLiveRange& printable_range) {
  const LiveRange* range = printable_range.range_;
  os << "Range: " << range->TopLevel()->vreg() << ":" << range->relative_id()
     << " ";
  if (range->TopLevel()->is_phi()) os << "phi ";
  if (range->TopLevel()->is_non_loop_phi()) os << "nlphi ";

  os << "{" << std::endl;
  UseInterval* interval = range->first_interval();
  UsePosition* use_pos = range->first_pos();
  while (use_pos != nullptr) {
    if (use_pos->HasOperand()) {
      os << *use_pos->operand() << use_pos->pos() << " ";
    }
    use_pos = use_pos->next();
  }
  os << std::endl;

  while (interval != nullptr) {
    os << '[' << interval->start() << ", " << interval->end() << ')'
       << std::endl;
    interval = interval->next();
  }
  os << "}";
  return os;
}
void PrintBlockRow(std::ostream& os, const InstructionBlocks& blocks) {
  os << "     ";
  for (auto block : blocks) {
    LifetimePosition start_pos = LifetimePosition::GapFromInstructionIndex(
        block->first_instruction_index());
    LifetimePosition end_pos = LifetimePosition::GapFromInstructionIndex(
                                   block->last_instruction_index())
                                   .NextFullStart();
    int length = end_pos.value() - start_pos.value();
    constexpr int kMaxPrefixLength = 32;
    char buffer[kMaxPrefixLength];
    int rpo_number = block->rpo_number().ToInt();
    const char* deferred_marker = block->IsDeferred() ? "(deferred)" : "";
    int max_prefix_length = std::min(length, kMaxPrefixLength);
    int prefix = snprintf(buffer, max_prefix_length, "[-B%d-%s", rpo_number,
                          deferred_marker);
    os << buffer;
    int remaining = length - std::min(prefix, max_prefix_length) - 1;
    for (int i = 0; i < remaining; ++i) os << '-';
    os << ']';
  }
  os << '\n';
}
void LinearScanAllocator::PrintRangeOverview(std::ostream& os) {
  int rowcount = 0;
  for (auto toplevel : data()->live_ranges()) {
    if (!CanProcessRange(toplevel)) continue;
    if (rowcount++ % 10 == 0) PrintBlockRow(os, code()->instruction_blocks());
    int position = 0;
    os << std::setw(3) << toplevel->vreg()
       << (toplevel->IsSplinter() ? "s:" : ": ");
    for (LiveRange* range = toplevel; range != nullptr; range = range->next()) {
      for (UseInterval* interval = range->first_interval(); interval != nullptr;
           interval = interval->next()) {
        LifetimePosition start = interval->start();
        LifetimePosition end = interval->end();
        CHECK_GE(start.value(), position);
        for (; start.value() > position; position++) {
          os << ' ';
        }
        int length = end.value() - start.value();
        constexpr int kMaxPrefixLength = 32;
        char buffer[kMaxPrefixLength];
        int max_prefix_length = std::min(length + 1, kMaxPrefixLength);
        int prefix;
        if (range->spilled()) {
          prefix = snprintf(buffer, max_prefix_length, "|ss");
        } else {
          const char* reg_name;
          if (range->assigned_register() == kUnassignedRegister) {
            reg_name = "???";  // Assumed placeholder label for an unassigned
                               // register; the original string was lost.
          } else {
            reg_name = RegisterName(range->assigned_register());
          }
          prefix = snprintf(buffer, max_prefix_length, "|%s", reg_name);
        }
        os << buffer;
        position += std::min(prefix, max_prefix_length - 1);
        CHECK_GE(end.value(), position);
        const char line_style = range->spilled() ? '-' : '=';
        for (; end.value() > position; position++) {
          os << line_style;
        }
      }
    }
    os << '\n';
  }
}
SpillRange::SpillRange(TopLevelLiveRange* parent, Zone* zone)
    : live_ranges_(zone),
      assigned_slot_(kUnassignedSlot),
      byte_width_(GetByteWidth(parent->representation())) {
  // Spill ranges are created for top level, non-splintered ranges. This is so
  // that, when merging decisions are made, we consider the full extent of the
  // virtual register, and avoid clobbering it.
  DCHECK(!parent->IsSplinter());
  UseInterval* result = nullptr;
  UseInterval* node = nullptr;
  // Copy the intervals for all ranges.
  for (LiveRange* range = parent; range != nullptr; range = range->next()) {
    UseInterval* src = range->first_interval();
    while (src != nullptr) {
      UseInterval* new_node = new (zone) UseInterval(src->start(), src->end());
      if (result == nullptr) {
        result = new_node;
      } else {
        node->set_next(new_node);
      }
      node = new_node;
      src = src->next();
    }
  }
  use_interval_ = result;
  live_ranges().push_back(parent);
  end_position_ = node->end();
  parent->SetSpillRange(this);
}

bool SpillRange::IsIntersectingWith(SpillRange* other) const {
  if (this->use_interval_ == nullptr || other->use_interval_ == nullptr ||
      this->End() <= other->use_interval_->start() ||
      other->End() <= this->use_interval_->start()) {
    return false;
  }
  return AreUseIntervalsIntersecting(use_interval_, other->use_interval_);
}
bool SpillRange::TryMerge(SpillRange* other) {
  if (HasSlot() || other->HasSlot()) return false;
  if (byte_width() != other->byte_width() || IsIntersectingWith(other))
    return false;

  LifetimePosition max = LifetimePosition::MaxPosition();
  if (End() < other->End() && other->End() != max) {
    end_position_ = other->End();
  }
  other->end_position_ = max;

  MergeDisjointIntervals(other->use_interval_);
  other->use_interval_ = nullptr;

  for (TopLevelLiveRange* range : other->live_ranges()) {
    DCHECK(range->GetSpillRange() == other);
    range->SetSpillRange(this);
  }

  live_ranges().insert(live_ranges().end(), other->live_ranges().begin(),
                       other->live_ranges().end());
  other->live_ranges().clear();

  return true;
}

void SpillRange::MergeDisjointIntervals(UseInterval* other) {
  UseInterval* tail = nullptr;
  UseInterval* current = use_interval_;
  while (other != nullptr) {
    // Make sure the 'current' list starts first.
    if (current == nullptr || current->start() > other->start()) {
      std::swap(current, other);
    }
    // Check disjointness.
    DCHECK(other == nullptr || current->end() <= other->start());
    // Append the 'current' node to the result accumulator and move forward.
    if (tail == nullptr) {
      use_interval_ = current;
    } else {
      tail->set_next(current);
    }
    tail = current;
    current = current->next();
  }
  // Other list is empty => we are done.
}
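// A zipper merge of two already-disjoint sorted lists: whichever list has the
// earlier start contributes its head node next, so merging {[0,4), [10,12)}
// with {[6,8)} links up as [0,4) -> [6,8) -> [10,12) without allocating any
// new intervals.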
void SpillRange::Print() const {
  StdoutStream os;
  os << "{" << std::endl;
  for (TopLevelLiveRange* range : live_ranges()) {
    os << range->vreg() << " ";
  }
  os << std::endl;

  for (UseInterval* i = interval(); i != nullptr; i = i->next()) {
    os << '[' << i->start() << ", " << i->end() << ')' << std::endl;
  }
  os << "}" << std::endl;
}
RegisterAllocationData::PhiMapValue::PhiMapValue(PhiInstruction* phi,
                                                 const InstructionBlock* block,
                                                 Zone* zone)
    : phi_(phi),
      block_(block),
      incoming_operands_(zone),
      assigned_register_(kUnassignedRegister) {
  incoming_operands_.reserve(phi->operands().size());
}

void RegisterAllocationData::PhiMapValue::AddOperand(
    InstructionOperand* operand) {
  incoming_operands_.push_back(operand);
}

void RegisterAllocationData::PhiMapValue::CommitAssignment(
    const InstructionOperand& assigned) {
  for (InstructionOperand* operand : incoming_operands_) {
    InstructionOperand::ReplaceWith(operand, &assigned);
  }
}
RegisterAllocationData::RegisterAllocationData(
    const RegisterConfiguration* config, Zone* zone, Frame* frame,
    InstructionSequence* code, const char* debug_name)
    : allocation_zone_(zone),
      frame_(frame),
      code_(code),
      debug_name_(debug_name),
      config_(config),
      phi_map_(allocation_zone()),
      live_in_sets_(code->InstructionBlockCount(), nullptr, allocation_zone()),
      live_out_sets_(code->InstructionBlockCount(), nullptr, allocation_zone()),
      live_ranges_(code->VirtualRegisterCount() * 2, nullptr,
                   allocation_zone()),
      fixed_live_ranges_(this->config()->num_general_registers(), nullptr,
                         allocation_zone()),
      fixed_float_live_ranges_(allocation_zone()),
      fixed_double_live_ranges_(this->config()->num_double_registers(), nullptr,
                                allocation_zone()),
      fixed_simd128_live_ranges_(allocation_zone()),
      spill_ranges_(code->VirtualRegisterCount(), nullptr, allocation_zone()),
      delayed_references_(allocation_zone()),
      assigned_registers_(nullptr),
      assigned_double_registers_(nullptr),
      virtual_register_count_(code->VirtualRegisterCount()),
      preassigned_slot_ranges_(zone) {
  if (!kSimpleFPAliasing) {
    fixed_float_live_ranges_.resize(this->config()->num_float_registers(),
                                    nullptr);
    fixed_simd128_live_ranges_.resize(this->config()->num_simd128_registers(),
                                      nullptr);
  }

  assigned_registers_ = new (code_zone())
      BitVector(this->config()->num_general_registers(), code_zone());
  assigned_double_registers_ = new (code_zone())
      BitVector(this->config()->num_double_registers(), code_zone());
  this->frame()->SetAllocatedRegisters(assigned_registers_);
  this->frame()->SetAllocatedDoubleRegisters(assigned_double_registers_);
}
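// live_ranges_ starts at twice the current virtual register count, presumably
// to limit reallocation: splitting and splintering later mint fresh virtual
// registers through GetNextLiveRangeId(), which grows this vector on demand
// (see below).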
MoveOperands* RegisterAllocationData::AddGapMove(
    int index, Instruction::GapPosition position,
    const InstructionOperand& from, const InstructionOperand& to) {
  Instruction* instr = code()->InstructionAt(index);
  ParallelMove* moves = instr->GetOrCreateParallelMove(position, code_zone());
  return moves->AddMove(from, to);
}

MachineRepresentation RegisterAllocationData::RepresentationFor(
    int virtual_register) {
  DCHECK_LT(virtual_register, code()->VirtualRegisterCount());
  return code()->GetRepresentation(virtual_register);
}

TopLevelLiveRange* RegisterAllocationData::GetOrCreateLiveRangeFor(int index) {
  if (index >= static_cast<int>(live_ranges().size())) {
    live_ranges().resize(index + 1, nullptr);
  }
  TopLevelLiveRange* result = live_ranges()[index];
  if (result == nullptr) {
    result = NewLiveRange(index, RepresentationFor(index));
    live_ranges()[index] = result;
  }
  return result;
}
TopLevelLiveRange* RegisterAllocationData::NewLiveRange(
    int index, MachineRepresentation rep) {
  return new (allocation_zone()) TopLevelLiveRange(index, rep);
}

int RegisterAllocationData::GetNextLiveRangeId() {
  int vreg = virtual_register_count_++;
  if (vreg >= static_cast<int>(live_ranges().size())) {
    live_ranges().resize(vreg + 1, nullptr);
  }
  return vreg;
}

TopLevelLiveRange* RegisterAllocationData::NextLiveRange(
    MachineRepresentation rep) {
  int vreg = GetNextLiveRangeId();
  TopLevelLiveRange* ret = NewLiveRange(vreg, rep);
  return ret;
}

RegisterAllocationData::PhiMapValue* RegisterAllocationData::InitializePhiMap(
    const InstructionBlock* block, PhiInstruction* phi) {
  RegisterAllocationData::PhiMapValue* map_value = new (allocation_zone())
      RegisterAllocationData::PhiMapValue(phi, block, allocation_zone());
  auto res =
      phi_map_.insert(std::make_pair(phi->virtual_register(), map_value));
  DCHECK(res.second);
  USE(res);
  return map_value;
}
RegisterAllocationData::PhiMapValue* RegisterAllocationData::GetPhiMapValueFor(
    int virtual_register) {
  auto it = phi_map_.find(virtual_register);
  DCHECK(it != phi_map_.end());
  return it->second;
}

RegisterAllocationData::PhiMapValue* RegisterAllocationData::GetPhiMapValueFor(
    TopLevelLiveRange* top_range) {
  return GetPhiMapValueFor(top_range->vreg());
}
bool RegisterAllocationData::ExistsUseWithoutDefinition() {
  bool found = false;
  BitVector::Iterator iterator(live_in_sets()[0]);
  while (!iterator.Done()) {
    found = true;
    int operand_index = iterator.Current();
    PrintF("Register allocator error: live v%d reached first block.\n",
           operand_index);
    LiveRange* range = GetOrCreateLiveRangeFor(operand_index);
    PrintF("  (first use is at %d)\n", range->first_pos()->pos().value());
    if (debug_name() == nullptr) {
      PrintF("\n");
    } else {
      PrintF("  (function: %s)\n", debug_name());
    }
    iterator.Advance();
  }
  return found;
}
bool RegisterAllocationData::RangesDefinedInDeferredStayInDeferred() {
  const size_t live_ranges_size = live_ranges().size();
  for (const TopLevelLiveRange* range : live_ranges()) {
    CHECK_EQ(live_ranges_size,
             live_ranges().size());  // The set of live ranges must not change.
    if (range == nullptr || range->IsEmpty() ||
        !code()
             ->GetInstructionBlock(range->Start().ToInstructionIndex())
             ->IsDeferred()) {
      continue;
    }
    for (const UseInterval* i = range->first_interval(); i != nullptr;
         i = i->next()) {
      int first = i->FirstGapIndex();
      int last = i->LastGapIndex();
      for (int instr = first; instr <= last;) {
        const InstructionBlock* block = code()->GetInstructionBlock(instr);
        if (!block->IsDeferred()) return false;
        instr = block->last_instruction_index() + 1;
      }
    }
  }
  return true;
}
SpillRange* RegisterAllocationData::AssignSpillRangeToLiveRange(
    TopLevelLiveRange* range) {
  DCHECK(!range->HasSpillOperand());

  SpillRange* spill_range = range->GetAllocatedSpillRange();
  if (spill_range == nullptr) {
    DCHECK(!range->IsSplinter());
    spill_range = new (allocation_zone()) SpillRange(range, allocation_zone());
  }
  range->set_spill_type(TopLevelLiveRange::SpillType::kSpillRange);

  int spill_range_index =
      range->IsSplinter() ? range->splintered_from()->vreg() : range->vreg();

  spill_ranges()[spill_range_index] = spill_range;

  return spill_range;
}

SpillRange* RegisterAllocationData::CreateSpillRangeForLiveRange(
    TopLevelLiveRange* range) {
  DCHECK(!range->HasSpillOperand());
  DCHECK(!range->IsSplinter());
  SpillRange* spill_range =
      new (allocation_zone()) SpillRange(range, allocation_zone());
  return spill_range;
}
void RegisterAllocationData::MarkAllocated(MachineRepresentation rep,
                                           int index) {
  switch (rep) {
    case MachineRepresentation::kFloat32:
    case MachineRepresentation::kSimd128:
      if (kSimpleFPAliasing) {
        assigned_double_registers_->Add(index);
      } else {
        int alias_base_index = -1;
        int aliases = config()->GetAliases(
            rep, index, MachineRepresentation::kFloat64, &alias_base_index);
        DCHECK(aliases > 0 || (aliases == 0 && alias_base_index == -1));
        while (aliases--) {
          int aliased_reg = alias_base_index + aliases;
          assigned_double_registers_->Add(aliased_reg);
        }
      }
      break;
    case MachineRepresentation::kFloat64:
      assigned_double_registers_->Add(index);
      break;
    default:
      DCHECK(!IsFloatingPoint(rep));
      assigned_registers_->Add(index);
      break;
  }
}

bool RegisterAllocationData::IsBlockBoundary(LifetimePosition pos) const {
  return pos.IsFullStart() &&
         code()->GetInstructionBlock(pos.ToInstructionIndex())->code_start() ==
             pos.ToInstructionIndex();
}
ConstraintBuilder::ConstraintBuilder(RegisterAllocationData* data)
    : data_(data) {}
InstructionOperand* ConstraintBuilder::AllocateFixed(
    UnallocatedOperand* operand, int pos, bool is_tagged) {
  TRACE("Allocating fixed reg for op %d\n", operand->virtual_register());
  DCHECK(operand->HasFixedPolicy());
  InstructionOperand allocated;
  MachineRepresentation rep = InstructionSequence::DefaultRepresentation();
  int virtual_register = operand->virtual_register();
  if (virtual_register != InstructionOperand::kInvalidVirtualRegister) {
    rep = data()->RepresentationFor(virtual_register);
  }
  if (operand->HasFixedSlotPolicy()) {
    allocated = AllocatedOperand(AllocatedOperand::STACK_SLOT, rep,
                                 operand->fixed_slot_index());
  } else if (operand->HasFixedRegisterPolicy()) {
    DCHECK(!IsFloatingPoint(rep));
    DCHECK(data()->config()->IsAllocatableGeneralCode(
        operand->fixed_register_index()));
    allocated = AllocatedOperand(AllocatedOperand::REGISTER, rep,
                                 operand->fixed_register_index());
  } else if (operand->HasFixedFPRegisterPolicy()) {
    DCHECK(IsFloatingPoint(rep));
    DCHECK_NE(InstructionOperand::kInvalidVirtualRegister, virtual_register);
    allocated = AllocatedOperand(AllocatedOperand::REGISTER, rep,
                                 operand->fixed_register_index());
  } else {
    UNREACHABLE();
  }
  InstructionOperand::ReplaceWith(operand, &allocated);
  if (is_tagged) {
    TRACE("Fixed reg is tagged at %d\n", pos);
    Instruction* instr = code()->InstructionAt(pos);
    if (instr->HasReferenceMap()) {
      instr->reference_map()->RecordReference(*AllocatedOperand::cast(operand));
    }
  }
  return operand;
}
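// Example (hypothetical register code): an UnallocatedOperand carrying a
// fixed-register policy for register 3 is rewritten in place into
// AllocatedOperand(REGISTER, rep, 3) here; the callers below then insert gap
// moves between this fixed location and an unconstrained copy of the same
// virtual register.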
void ConstraintBuilder::MeetRegisterConstraints() {
  for (InstructionBlock* block : code()->instruction_blocks()) {
    MeetRegisterConstraints(block);
  }
}

void ConstraintBuilder::MeetRegisterConstraints(const InstructionBlock* block) {
  int start = block->first_instruction_index();
  int end = block->last_instruction_index();
  DCHECK_NE(-1, start);
  for (int i = start; i <= end; ++i) {
    MeetConstraintsBefore(i);
    if (i != end) MeetConstraintsAfter(i);
  }
  // Meet register constraints for the instruction in the end.
  MeetRegisterConstraintsForLastInstructionInBlock(block);
}
void ConstraintBuilder::MeetRegisterConstraintsForLastInstructionInBlock(
    const InstructionBlock* block) {
  int end = block->last_instruction_index();
  Instruction* last_instruction = code()->InstructionAt(end);
  for (size_t i = 0; i < last_instruction->OutputCount(); i++) {
    InstructionOperand* output_operand = last_instruction->OutputAt(i);
    DCHECK(!output_operand->IsConstant());
    UnallocatedOperand* output = UnallocatedOperand::cast(output_operand);
    int output_vreg = output->virtual_register();
    TopLevelLiveRange* range = data()->GetOrCreateLiveRangeFor(output_vreg);
    bool assigned = false;
    if (output->HasFixedPolicy()) {
      AllocateFixed(output, -1, false);
      // This value is produced on the stack, we never need to spill it.
      if (output->IsStackSlot()) {
        DCHECK(LocationOperand::cast(output)->index() <
               data()->frame()->GetSpillSlotCount());
        range->SetSpillOperand(LocationOperand::cast(output));
        range->SetSpillStartIndex(end);
        assigned = true;
      }

      for (const RpoNumber& succ : block->successors()) {
        const InstructionBlock* successor = code()->InstructionBlockAt(succ);
        DCHECK_EQ(1, successor->PredecessorCount());
        int gap_index = successor->first_instruction_index();
        // Create an unconstrained operand for the same virtual register
        // and insert a gap move from the fixed output to the operand.
        UnallocatedOperand output_copy(UnallocatedOperand::REGISTER_OR_SLOT,
                                       output_vreg);
        data()->AddGapMove(gap_index, Instruction::START, *output, output_copy);
      }
    }

    if (!assigned) {
      for (const RpoNumber& succ : block->successors()) {
        const InstructionBlock* successor = code()->InstructionBlockAt(succ);
        DCHECK_EQ(1, successor->PredecessorCount());
        int gap_index = successor->first_instruction_index();
        range->RecordSpillLocation(allocation_zone(), gap_index, output);
        range->SetSpillStartIndex(gap_index);
      }
    }
  }
}
void ConstraintBuilder::MeetConstraintsAfter(int instr_index) {
  Instruction* first = code()->InstructionAt(instr_index);
  // Handle fixed temporaries.
  for (size_t i = 0; i < first->TempCount(); i++) {
    UnallocatedOperand* temp = UnallocatedOperand::cast(first->TempAt(i));
    if (temp->HasFixedPolicy()) AllocateFixed(temp, instr_index, false);
  }
  // Handle constant/fixed output operands.
  for (size_t i = 0; i < first->OutputCount(); i++) {
    InstructionOperand* output = first->OutputAt(i);
    if (output->IsConstant()) {
      int output_vreg = ConstantOperand::cast(output)->virtual_register();
      TopLevelLiveRange* range = data()->GetOrCreateLiveRangeFor(output_vreg);
      range->SetSpillStartIndex(instr_index + 1);
      range->SetSpillOperand(output);
      continue;
    }
    UnallocatedOperand* first_output = UnallocatedOperand::cast(output);
    TopLevelLiveRange* range =
        data()->GetOrCreateLiveRangeFor(first_output->virtual_register());
    bool assigned = false;
    if (first_output->HasFixedPolicy()) {
      int output_vreg = first_output->virtual_register();
      UnallocatedOperand output_copy(UnallocatedOperand::REGISTER_OR_SLOT,
                                     output_vreg);
      bool is_tagged = code()->IsReference(output_vreg);
      if (first_output->HasSecondaryStorage()) {
        range->MarkHasPreassignedSlot();
        data()->preassigned_slot_ranges().push_back(
            std::make_pair(range, first_output->GetSecondaryStorage()));
      }
      AllocateFixed(first_output, instr_index, is_tagged);

      // This value is produced on the stack, we never need to spill it.
      if (first_output->IsStackSlot()) {
        DCHECK(LocationOperand::cast(first_output)->index() <
               data()->frame()->GetTotalFrameSlotCount());
        range->SetSpillOperand(LocationOperand::cast(first_output));
        range->SetSpillStartIndex(instr_index + 1);
        assigned = true;
      }
      data()->AddGapMove(instr_index + 1, Instruction::START, *first_output,
                         output_copy);
    }
    // Make sure we add a gap move for spilling (if we have not done
    // so already).
    if (!assigned) {
      range->RecordSpillLocation(allocation_zone(), instr_index + 1,
                                 first_output);
      range->SetSpillStartIndex(instr_index + 1);
    }
  }
}
void ConstraintBuilder::MeetConstraintsBefore(int instr_index) {
  Instruction* second = code()->InstructionAt(instr_index);
  // Handle fixed input operands of second instruction.
  for (size_t i = 0; i < second->InputCount(); i++) {
    InstructionOperand* input = second->InputAt(i);
    if (input->IsImmediate() || input->IsExplicit()) {
      continue;  // Ignore immediates and explicitly reserved registers.
    }
    UnallocatedOperand* cur_input = UnallocatedOperand::cast(input);
    if (cur_input->HasFixedPolicy()) {
      int input_vreg = cur_input->virtual_register();
      UnallocatedOperand input_copy(UnallocatedOperand::REGISTER_OR_SLOT,
                                    input_vreg);
      bool is_tagged = code()->IsReference(input_vreg);
      AllocateFixed(cur_input, instr_index, is_tagged);
      data()->AddGapMove(instr_index, Instruction::END, input_copy, *cur_input);
    }
  }
  // Handle "output same as input" for second instruction.
  for (size_t i = 0; i < second->OutputCount(); i++) {
    InstructionOperand* output = second->OutputAt(i);
    if (!output->IsUnallocated()) continue;
    UnallocatedOperand* second_output = UnallocatedOperand::cast(output);
    if (!second_output->HasSameAsInputPolicy()) continue;
    DCHECK_EQ(0, i);  // Only valid for first output.
    UnallocatedOperand* cur_input =
        UnallocatedOperand::cast(second->InputAt(0));
    int output_vreg = second_output->virtual_register();
    int input_vreg = cur_input->virtual_register();
    UnallocatedOperand input_copy(UnallocatedOperand::REGISTER_OR_SLOT,
                                  input_vreg);
    *cur_input =
        UnallocatedOperand(*cur_input, second_output->virtual_register());
    MoveOperands* gap_move = data()->AddGapMove(instr_index, Instruction::END,
                                                input_copy, *cur_input);
    if (code()->IsReference(input_vreg) && !code()->IsReference(output_vreg)) {
      if (second->HasReferenceMap()) {
        RegisterAllocationData::DelayedReference delayed_reference = {
            second->reference_map(), &gap_move->source()};
        data()->delayed_references().push_back(delayed_reference);
      }
    } else if (!code()->IsReference(input_vreg) &&
               code()->IsReference(output_vreg)) {
      // The input is assumed to immediately have a tagged representation,
      // before the pointer map can be used. I.e. the pointer map at the
      // instruction will include the output operand (whose value at the
      // beginning of the instruction is equal to the input operand). If
      // this is not desired, then the pointer map at this instruction needs
      // to be adjusted manually.
    }
  }
}
void ConstraintBuilder::ResolvePhis() {
  // Process the blocks in reverse order.
  for (InstructionBlock* block : base::Reversed(code()->instruction_blocks())) {
    ResolvePhis(block);
  }
}

void ConstraintBuilder::ResolvePhis(const InstructionBlock* block) {
  for (PhiInstruction* phi : block->phis()) {
    int phi_vreg = phi->virtual_register();
    RegisterAllocationData::PhiMapValue* map_value =
        data()->InitializePhiMap(block, phi);
    InstructionOperand& output = phi->output();
    // Map the destination operands, so the commitment phase can find them.
    for (size_t i = 0; i < phi->operands().size(); ++i) {
      InstructionBlock* cur_block =
          code()->InstructionBlockAt(block->predecessors()[i]);
      UnallocatedOperand input(UnallocatedOperand::REGISTER_OR_SLOT,
                               phi->operands()[i]);
      MoveOperands* move = data()->AddGapMove(
          cur_block->last_instruction_index(), Instruction::END, input, output);
      map_value->AddOperand(&move->destination());
      DCHECK(!code()
                  ->InstructionAt(cur_block->last_instruction_index())
                  ->HasReferenceMap());
    }
    TopLevelLiveRange* live_range = data()->GetOrCreateLiveRangeFor(phi_vreg);
    int gap_index = block->first_instruction_index();
    live_range->RecordSpillLocation(allocation_zone(), gap_index, &output);
    live_range->SetSpillStartIndex(gap_index);
    // We use the phi-ness of some nodes in some later heuristics.
    live_range->set_is_phi(true);
    live_range->set_is_non_loop_phi(!block->IsLoopHeader());
  }
}
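// Sketch (hypothetical vregs): for v7 = phi(v3, v5) in block B3 with
// predecessors B1 and B2, this inserts the gap moves "v3 -> v7" at the end of
// B1 and "v5 -> v7" at the end of B2, records both destinations in the
// PhiMapValue so a register later assigned to v7 can be committed into them,
// and pre-records a spill location at B3's first gap.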
LiveRangeBuilder::LiveRangeBuilder(RegisterAllocationData* data,
                                   Zone* local_zone)
    : data_(data), phi_hints_(local_zone) {}

BitVector* LiveRangeBuilder::ComputeLiveOut(const InstructionBlock* block,
                                            RegisterAllocationData* data) {
  size_t block_index = block->rpo_number().ToSize();
  BitVector* live_out = data->live_out_sets()[block_index];
  if (live_out == nullptr) {
    // Compute live out for the given block, except not including backward
    // successor edges.
    Zone* zone = data->allocation_zone();
    const InstructionSequence* code = data->code();

    live_out = new (zone) BitVector(code->VirtualRegisterCount(), zone);

    // Process all successor blocks.
    for (const RpoNumber& succ : block->successors()) {
      if (succ <= block->rpo_number()) continue;
      // Add values live on entry to the successor.
      BitVector* live_in = data->live_in_sets()[succ.ToSize()];
      if (live_in != nullptr) live_out->Union(*live_in);

      // All phi input operands corresponding to this successor edge are live
      // out from this block.
      const InstructionBlock* successor = code->InstructionBlockAt(succ);
      size_t index = successor->PredecessorIndexOf(block->rpo_number());
      DCHECK(index < successor->PredecessorCount());
      for (PhiInstruction* phi : successor->phis()) {
        live_out->Add(phi->operands()[index]);
      }
    }
    data->live_out_sets()[block_index] = live_out;
  }
  return live_out;
}
void LiveRangeBuilder::AddInitialIntervals(const InstructionBlock* block,
                                           BitVector* live_out) {
  // Add an interval that includes the entire block to the live range for
  // each live_out value.
  LifetimePosition start = LifetimePosition::GapFromInstructionIndex(
      block->first_instruction_index());
  LifetimePosition end = LifetimePosition::InstructionFromInstructionIndex(
                             block->last_instruction_index())
                             .NextStart();
  BitVector::Iterator iterator(live_out);
  while (!iterator.Done()) {
    int operand_index = iterator.Current();
    TopLevelLiveRange* range = data()->GetOrCreateLiveRangeFor(operand_index);
    range->AddUseInterval(start, end, allocation_zone());
    iterator.Advance();
  }
}

int LiveRangeBuilder::FixedFPLiveRangeID(int index, MachineRepresentation rep) {
  int result = -index - 1;
  switch (rep) {
    case MachineRepresentation::kSimd128:
      result -= config()->num_float_registers();
      V8_FALLTHROUGH;
    case MachineRepresentation::kFloat32:
      result -= config()->num_double_registers();
      V8_FALLTHROUGH;
    case MachineRepresentation::kFloat64:
      result -= config()->num_general_registers();
      break;
    default:
      UNREACHABLE();
  }
  return result;
}
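// Example (hypothetical counts: 16 general, 16 double, 32 float registers):
// for rep == kFloat32 and index == 2, result starts at -3, the kFloat32 case
// subtracts the 16 double registers, and the fall-through kFloat64 case
// subtracts the 16 general registers, giving -35. Each register class thus
// occupies its own band of negative ids, disjoint from vreg ids >= 0.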
TopLevelLiveRange* LiveRangeBuilder::FixedLiveRangeFor(int index) {
  DCHECK(index < config()->num_general_registers());
  TopLevelLiveRange* result = data()->fixed_live_ranges()[index];
  if (result == nullptr) {
    MachineRepresentation rep = InstructionSequence::DefaultRepresentation();
    result = data()->NewLiveRange(FixedLiveRangeID(index), rep);
    DCHECK(result->IsFixed());
    result->set_assigned_register(index);
    data()->MarkAllocated(rep, index);
    data()->fixed_live_ranges()[index] = result;
  }
  return result;
}

TopLevelLiveRange* LiveRangeBuilder::FixedFPLiveRangeFor(
    int index, MachineRepresentation rep) {
  int num_regs = config()->num_double_registers();
  ZoneVector<TopLevelLiveRange*>* live_ranges =
      &data()->fixed_double_live_ranges();
  if (!kSimpleFPAliasing) {
    switch (rep) {
      case MachineRepresentation::kFloat32:
        num_regs = config()->num_float_registers();
        live_ranges = &data()->fixed_float_live_ranges();
        break;
      case MachineRepresentation::kSimd128:
        num_regs = config()->num_simd128_registers();
        live_ranges = &data()->fixed_simd128_live_ranges();
        break;
      default:
        break;
    }
  }

  DCHECK(index < num_regs);
  USE(num_regs);
  TopLevelLiveRange* result = (*live_ranges)[index];
  if (result == nullptr) {
    result = data()->NewLiveRange(FixedFPLiveRangeID(index, rep), rep);
    DCHECK(result->IsFixed());
    result->set_assigned_register(index);
    data()->MarkAllocated(rep, index);
    (*live_ranges)[index] = result;
  }
  return result;
}
TopLevelLiveRange* LiveRangeBuilder::LiveRangeFor(InstructionOperand* operand) {
  if (operand->IsUnallocated()) {
    return data()->GetOrCreateLiveRangeFor(
        UnallocatedOperand::cast(operand)->virtual_register());
  } else if (operand->IsConstant()) {
    return data()->GetOrCreateLiveRangeFor(
        ConstantOperand::cast(operand)->virtual_register());
  } else if (operand->IsRegister()) {
    return FixedLiveRangeFor(
        LocationOperand::cast(operand)->GetRegister().code());
  } else if (operand->IsFPRegister()) {
    LocationOperand* op = LocationOperand::cast(operand);
    return FixedFPLiveRangeFor(op->register_code(), op->representation());
  } else {
    return nullptr;
  }
}

UsePosition* LiveRangeBuilder::NewUsePosition(LifetimePosition pos,
                                              InstructionOperand* operand,
                                              void* hint,
                                              UsePositionHintType hint_type) {
  return new (allocation_zone()) UsePosition(pos, operand, hint, hint_type);
}

UsePosition* LiveRangeBuilder::Define(LifetimePosition position,
                                      InstructionOperand* operand, void* hint,
                                      UsePositionHintType hint_type) {
  TopLevelLiveRange* range = LiveRangeFor(operand);
  if (range == nullptr) return nullptr;

  if (range->IsEmpty() || range->Start() > position) {
    // Can happen if there is a definition without use.
    range->AddUseInterval(position, position.NextStart(), allocation_zone());
    range->AddUsePosition(NewUsePosition(position.NextStart()));
  } else {
    range->ShortenTo(position);
  }
  if (!operand->IsUnallocated()) return nullptr;
  UnallocatedOperand* unalloc_operand = UnallocatedOperand::cast(operand);
  UsePosition* use_pos =
      NewUsePosition(position, unalloc_operand, hint, hint_type);
  range->AddUsePosition(use_pos);
  return use_pos;
}
UsePosition* LiveRangeBuilder::Use(LifetimePosition block_start,
                                   LifetimePosition position,
                                   InstructionOperand* operand, void* hint,
                                   UsePositionHintType hint_type) {
  TopLevelLiveRange* range = LiveRangeFor(operand);
  if (range == nullptr) return nullptr;
  UsePosition* use_pos = nullptr;
  if (operand->IsUnallocated()) {
    UnallocatedOperand* unalloc_operand = UnallocatedOperand::cast(operand);
    use_pos = NewUsePosition(position, unalloc_operand, hint, hint_type);
    range->AddUsePosition(use_pos);
  }
  range->AddUseInterval(block_start, position, allocation_zone());
  return use_pos;
}
void LiveRangeBuilder::ProcessInstructions(const InstructionBlock* block,
                                           BitVector* live) {
  int block_start = block->first_instruction_index();
  LifetimePosition block_start_position =
      LifetimePosition::GapFromInstructionIndex(block_start);
  bool fixed_float_live_ranges = false;
  bool fixed_simd128_live_ranges = false;
  if (!kSimpleFPAliasing) {
    int mask = data()->code()->representation_mask();
    fixed_float_live_ranges = (mask & kFloat32Bit) != 0;
    fixed_simd128_live_ranges = (mask & kSimd128Bit) != 0;
  }

  for (int index = block->last_instruction_index(); index >= block_start;
       index--) {
    LifetimePosition curr_position =
        LifetimePosition::InstructionFromInstructionIndex(index);
    Instruction* instr = code()->InstructionAt(index);
    DCHECK_NOT_NULL(instr);
    DCHECK(curr_position.IsInstructionPosition());

    // Process the outputs of the instruction.
    for (size_t i = 0; i < instr->OutputCount(); i++) {
      InstructionOperand* output = instr->OutputAt(i);
      if (output->IsUnallocated()) {
        // Unsupported.
        DCHECK(!UnallocatedOperand::cast(output)->HasSlotPolicy());
        int out_vreg = UnallocatedOperand::cast(output)->virtual_register();
        live->Remove(out_vreg);
      } else if (output->IsConstant()) {
        int out_vreg = ConstantOperand::cast(output)->virtual_register();
        live->Remove(out_vreg);
      }
      if (block->IsHandler() && index == block_start && output->IsAllocated() &&
          output->IsRegister() &&
          AllocatedOperand::cast(output)->GetRegister() ==
              v8::internal::kReturnRegister0) {
        // The register defined here is blocked from gap start - it is the
        // exception value.
        Define(LifetimePosition::GapFromInstructionIndex(index), output);
      } else {
        Define(curr_position, output);
      }
    }

    if (instr->ClobbersRegisters()) {
      for (int i = 0; i < config()->num_allocatable_general_registers(); ++i) {
        // Create a UseInterval at this instruction for all fixed registers
        // (including the instruction outputs). Adding another UseInterval
        // here is OK because AddUseInterval will just merge it with the
        // existing one at the end of the range.
        int code = config()->GetAllocatableGeneralCode(i);
        TopLevelLiveRange* range = FixedLiveRangeFor(code);
        range->AddUseInterval(curr_position, curr_position.End(),
                              allocation_zone());
      }
    }

    if (instr->ClobbersDoubleRegisters()) {
      for (int i = 0; i < config()->num_allocatable_double_registers(); ++i) {
        // Add a UseInterval for all DoubleRegisters. See comment above for
        // general registers.
        int code = config()->GetAllocatableDoubleCode(i);
        TopLevelLiveRange* range =
            FixedFPLiveRangeFor(code, MachineRepresentation::kFloat64);
        range->AddUseInterval(curr_position, curr_position.End(),
                              allocation_zone());
      }
      // Clobber fixed float/simd128 registers on archs with non-simple
      // aliasing.
      if (!kSimpleFPAliasing) {
        if (fixed_float_live_ranges) {
          for (int i = 0; i < config()->num_allocatable_float_registers();
               ++i) {
            int code = config()->GetAllocatableFloatCode(i);
            TopLevelLiveRange* range =
                FixedFPLiveRangeFor(code, MachineRepresentation::kFloat32);
            range->AddUseInterval(curr_position, curr_position.End(),
                                  allocation_zone());
          }
        }
        if (fixed_simd128_live_ranges) {
          for (int i = 0; i < config()->num_allocatable_simd128_registers();
               ++i) {
            int code = config()->GetAllocatableSimd128Code(i);
            TopLevelLiveRange* range =
                FixedFPLiveRangeFor(code, MachineRepresentation::kSimd128);
            range->AddUseInterval(curr_position, curr_position.End(),
                                  allocation_zone());
          }
        }
      }
    }

    for (size_t i = 0; i < instr->InputCount(); i++) {
      InstructionOperand* input = instr->InputAt(i);
      if (input->IsImmediate() || input->IsExplicit()) {
        continue;  // Ignore immediates and explicitly reserved registers.
      }
      LifetimePosition use_pos;
      if (input->IsUnallocated() &&
          UnallocatedOperand::cast(input)->IsUsedAtStart()) {
        use_pos = curr_position;
      } else {
        use_pos = curr_position.End();
      }

      if (input->IsUnallocated()) {
        UnallocatedOperand* unalloc = UnallocatedOperand::cast(input);
        int vreg = unalloc->virtual_register();
        live->Add(vreg);
        if (unalloc->HasSlotPolicy()) {
          data()->GetOrCreateLiveRangeFor(vreg)->set_has_slot_use(true);
        }
      }
      Use(block_start_position, use_pos, input);
    }

    for (size_t i = 0; i < instr->TempCount(); i++) {
      InstructionOperand* temp = instr->TempAt(i);
      // Unsupported.
      DCHECK_IMPLIES(temp->IsUnallocated(),
                     !UnallocatedOperand::cast(temp)->HasSlotPolicy());
      if (instr->ClobbersTemps()) {
        if (temp->IsRegister()) continue;
        if (temp->IsUnallocated()) {
          UnallocatedOperand* temp_unalloc = UnallocatedOperand::cast(temp);
          if (temp_unalloc->HasFixedPolicy()) {
            continue;
          }
        }
      }
      Use(block_start_position, curr_position.End(), temp);
      Define(curr_position, temp);
    }

    // Process the moves of the instruction's gaps, and treat them like moves
    // in a resulting parallel move.
    const Instruction::GapPosition kPositions[] = {Instruction::END,
                                                   Instruction::START};
    curr_position = curr_position.PrevStart();
    DCHECK(curr_position.IsGapPosition());
    for (const Instruction::GapPosition& position : kPositions) {
      ParallelMove* move = instr->GetParallelMove(position);
      if (move == nullptr) continue;
      if (position == Instruction::END) {
        curr_position = curr_position.End();
      } else {
        curr_position = curr_position.Start();
      }
      for (MoveOperands* cur : *move) {
        InstructionOperand& from = cur->source();
        InstructionOperand& to = cur->destination();
        void* hint = &to;
        UsePositionHintType hint_type = UsePosition::HintTypeForOperand(to);
        UsePosition* to_use = nullptr;
        int phi_vreg = -1;
        if (to.IsUnallocated()) {
          int to_vreg = UnallocatedOperand::cast(to).virtual_register();
          TopLevelLiveRange* to_range =
              data()->GetOrCreateLiveRangeFor(to_vreg);
          if (to_range->is_phi()) {
            phi_vreg = to_vreg;
            if (to_range->is_non_loop_phi()) {
              hint = to_range->current_hint_position();
              hint_type = hint == nullptr ? UsePositionHintType::kNone
                                          : UsePositionHintType::kUsePos;
            } else {
              hint_type = UsePositionHintType::kPhi;
              hint = data()->GetPhiMapValueFor(to_vreg);
            }
          } else {
            if (live->Contains(to_vreg)) {
              to_use = Define(curr_position, &to, &from,
                              UsePosition::HintTypeForOperand(from));
              live->Remove(to_vreg);
            } else {
              cur->Eliminate();
              continue;
            }
          }
        } else {
          Define(curr_position, &to);
        }
        UsePosition* from_use =
            Use(block_start_position, curr_position, &from, hint, hint_type);
        // Mark range live.
        if (from.IsUnallocated()) {
          live->Add(UnallocatedOperand::cast(from).virtual_register());
        }
        // Resolve use position hints just created.
        if (to_use != nullptr && from_use != nullptr) {
          to_use->ResolveHint(from_use);
          from_use->ResolveHint(to_use);
        }
        DCHECK_IMPLIES(to_use != nullptr, to_use->IsResolved());
        DCHECK_IMPLIES(from_use != nullptr, from_use->IsResolved());
        // Potentially resolve phi hint.
        if (phi_vreg != -1) ResolvePhiHint(&from, from_use);
      }
    }
  }
}

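// Phi outputs are defined at the block start. The hint chosen below is one
// of the predecessor gap-move sources feeding the phi, so that with luck the
// phi result is allocated to a register that already holds one of its
// inputs.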
void LiveRangeBuilder::ProcessPhis(const InstructionBlock* block,
                                   BitVector* live) {
  for (PhiInstruction* phi : block->phis()) {
    // The live range interval already ends at the first instruction of the
    // block.
    int phi_vreg = phi->virtual_register();
    live->Remove(phi_vreg);
    // Select a hint from a predecessor block that precedes this block in rpo
    // order. In order of priority: avoid hints from deferred blocks, prefer
    // hints from allocated operands, prefer hints from empty blocks.
    InstructionOperand* hint = nullptr;
    int hint_preference = 0;

    // The benefit of hinting decreases with the number of predecessors, so a
    // small limit suffices for the common if/else pattern.
    int predecessor_limit = 2;

    for (RpoNumber predecessor : block->predecessors()) {
      const InstructionBlock* predecessor_block =
          code()->InstructionBlockAt(predecessor);
      DCHECK_EQ(predecessor_block->rpo_number(), predecessor);

      // Only take hints from earlier rpo numbers.
      if (predecessor >= block->rpo_number()) continue;

      // Look up the predecessor instruction.
      const Instruction* predecessor_instr =
          GetLastInstruction(code(), predecessor_block);
      InstructionOperand* predecessor_hint = nullptr;
      // Phis are assigned in the END gap position of the last instruction in
      // each predecessor block.
      for (MoveOperands* move :
           *predecessor_instr->GetParallelMove(Instruction::END)) {
        InstructionOperand& to = move->destination();
        if (to.IsUnallocated() &&
            UnallocatedOperand::cast(to).virtual_register() == phi_vreg) {
          predecessor_hint = &move->source();
          break;
        }
      }
      DCHECK_NOT_NULL(predecessor_hint);

      // Score each predecessor hint according to the priorities above; flags
      // in higher-order bits have higher priority.
      int predecessor_hint_preference = 0;
      const int kNotDeferredBlockPreference = (1 << 2);
      const int kMoveIsAllocatedPreference = (1 << 1);
      const int kBlockIsEmptyPreference = (1 << 0);

      // Avoid hints from deferred blocks.
      if (!predecessor_block->IsDeferred()) {
        predecessor_hint_preference |= kNotDeferredBlockPreference;
      }

      // Prefer hints whose source is already allocated (or explicit).
      auto moves = predecessor_instr->GetParallelMove(Instruction::START);
      if (moves != nullptr) {
        for (MoveOperands* move : *moves) {
          InstructionOperand& to = move->destination();
          if (predecessor_hint->Equals(to)) {
            if (move->source().IsAllocated() || move->source().IsExplicit()) {
              predecessor_hint_preference |= kMoveIsAllocatedPreference;
            }
            break;
          }
        }
      }

      // Prefer hints from empty blocks.
      if (predecessor_block->last_instruction_index() ==
          predecessor_block->first_instruction_index()) {
        predecessor_hint_preference |= kBlockIsEmptyPreference;
      }

      if ((hint == nullptr) ||
          (predecessor_hint_preference > hint_preference)) {
        // Take the hint from this predecessor.
        hint = predecessor_hint;
        hint_preference = predecessor_hint_preference;
      }

      if (--predecessor_limit <= 0) break;
    }
    DCHECK_NOT_NULL(hint);

    LifetimePosition block_start = LifetimePosition::GapFromInstructionIndex(
        block->first_instruction_index());
    UsePosition* use_pos = Define(block_start, &phi->output(), hint,
                                  UsePosition::HintTypeForOperand(*hint));
    MapPhiHint(hint, use_pos);
  }
}

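// Values live on entry to a loop header may be used on the back edge, so
// they are conservatively kept live across the entire loop.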
void LiveRangeBuilder::ProcessLoopHeader(const InstructionBlock* block,
                                         BitVector* live) {
  DCHECK(block->IsLoopHeader());
  // Add a live range stretching from the first loop instruction to the last
  // for each value live on entry to the header.
  BitVector::Iterator iterator(live);
  LifetimePosition start = LifetimePosition::GapFromInstructionIndex(
      block->first_instruction_index());
  LifetimePosition end = LifetimePosition::GapFromInstructionIndex(
                             code()->LastLoopInstructionIndex(block))
                             .NextFullStart();
  while (!iterator.Done()) {
    int operand_index = iterator.Current();
    TopLevelLiveRange* range = data()->GetOrCreateLiveRangeFor(operand_index);
    range->EnsureInterval(start, end, allocation_zone());
    iterator.Advance();
  }
  // Insert all values into the live in sets of all blocks in the loop.
  for (int i = block->rpo_number().ToInt() + 1; i < block->loop_end().ToInt();
       ++i) {
    live_in_sets()[i]->Union(*live);
  }
}

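// BuildLiveRanges is, in essence, a standard backward liveness computation:
// blocks are visited in reverse rpo order, so each block's live-out set
// (minus back edges, which ProcessLoopHeader patches up) is known before the
// block itself is processed.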
void LiveRangeBuilder::BuildLiveRanges() {
  // Process the blocks in reverse order.
  for (int block_id = code()->InstructionBlockCount() - 1; block_id >= 0;
       --block_id) {
    InstructionBlock* block =
        code()->InstructionBlockAt(RpoNumber::FromInt(block_id));
    BitVector* live = ComputeLiveOut(block, data());
    // Initially consider all live_out values live for the entire block. We
    // will shorten these intervals if necessary.
    AddInitialIntervals(block, live);
    // Process the instructions in reverse order, generating and killing
    // live values.
    ProcessInstructions(block, live);
    // All phi output operands are killed by this block.
    ProcessPhis(block, live);
    // Now live is live_in for this block, except for values live out on
    // backward successor edges.
    if (block->IsLoopHeader()) ProcessLoopHeader(block, live);
    live_in_sets()[block_id] = live;
  }
  // Postprocess the ranges.
  const size_t live_ranges_size = data()->live_ranges().size();
  for (TopLevelLiveRange* range : data()->live_ranges()) {
    CHECK_EQ(live_ranges_size, data()->live_ranges().size());
    if (range == nullptr) continue;
    // Give slots to all ranges with a non fixed slot use.
    if (range->has_slot_use() && range->HasNoSpillType()) {
      data()->AssignSpillRangeToLiveRange(range);
    }
    // For constant live ranges, every use requires the constant to be in a
    // register; otherwise all uses with "any" policy would get the constant
    // operand assigned.
    if (range->HasSpillOperand() && range->GetSpillOperand()->IsConstant()) {
      for (UsePosition* pos = range->first_pos(); pos != nullptr;
           pos = pos->next()) {
        if (pos->type() == UsePositionType::kRequiresSlot ||
            pos->type() == UsePositionType::kRegisterOrSlotOrConstant) {
          continue;
        }
        UsePositionType new_type = UsePositionType::kRegisterOrSlot;
        // Can't mark phis as needing a register.
        if (!pos->pos().IsGapPosition()) {
          new_type = UsePositionType::kRequiresRegister;
        }
        pos->set_type(new_type, true);
      }
    }
  }
  for (auto preassigned : data()->preassigned_slot_ranges()) {
    TopLevelLiveRange* range = preassigned.first;
    int slot_id = preassigned.second;
    SpillRange* spill = range->HasSpillRange()
                            ? range->GetSpillRange()
                            : data()->AssignSpillRangeToLiveRange(range);
    spill->set_assigned_slot(slot_id);
  }
}

void LiveRangeBuilder::MapPhiHint(InstructionOperand* operand,
                                  UsePosition* use_pos) {
  DCHECK(!use_pos->IsResolved());
  auto res = phi_hints_.insert(std::make_pair(operand, use_pos));
  DCHECK(res.second);
  USE(res);
}

void LiveRangeBuilder::ResolvePhiHint(InstructionOperand* operand,
                                      UsePosition* use_pos) {
  auto it = phi_hints_.find(operand);
  if (it == phi_hints_.end()) return;
  DCHECK(!it->second->IsResolved());
  it->second->ResolveHint(use_pos);
}

void LiveRangeBuilder::Verify() const {
  for (auto& hint : phi_hints_) {
    CHECK(hint.second->IsResolved());
  }
  for (const TopLevelLiveRange* current : data()->live_ranges()) {
    if (current != nullptr && !current->IsEmpty()) {
      // New LiveRanges should not be split.
      CHECK_NULL(current->next());
      const UseInterval* first = current->first_interval();
      if (first->next() == nullptr) continue;

      // Consecutive intervals should not end and start in the same block;
      // otherwise they should have been joined, because the value is live
      // throughout that block.
      CHECK(NextIntervalStartsInDifferentBlocks(first));

      for (const UseInterval* i = first->next(); i != nullptr; i = i->next()) {
        // Except for the first interval, the other intervals must start at
        // a block boundary, otherwise data wouldn't flow to them.
        CHECK(IntervalStartsAtBlockBoundary(i));
        // The last instruction of each predecessor of the block where the
        // interval starts must be covered by the range.
        CHECK(IntervalPredecessorsCoveredByRange(i, current));
        if (i->next() != nullptr) {
          // Check the consecutive-intervals property, except for the last
          // interval, where it doesn't apply.
          CHECK(NextIntervalStartsInDifferentBlocks(i));
        }
      }
    }
  }
}

bool LiveRangeBuilder::IntervalStartsAtBlockBoundary(
    const UseInterval* interval) const {
  LifetimePosition start = interval->start();
  if (!start.IsFullStart()) return false;
  int instruction_index = start.ToInstructionIndex();
  const InstructionBlock* block =
      data()->code()->GetInstructionBlock(instruction_index);
  return block->first_instruction_index() == instruction_index;
}

bool LiveRangeBuilder::IntervalPredecessorsCoveredByRange(
    const UseInterval* interval, const TopLevelLiveRange* range) const {
  LifetimePosition start = interval->start();
  int instruction_index = start.ToInstructionIndex();
  const InstructionBlock* block =
      data()->code()->GetInstructionBlock(instruction_index);
  for (RpoNumber pred_index : block->predecessors()) {
    const InstructionBlock* predecessor =
        data()->code()->InstructionBlockAt(pred_index);
    LifetimePosition last_pos = LifetimePosition::GapFromInstructionIndex(
        predecessor->last_instruction_index());
    last_pos = last_pos.NextStart().End();
    if (!range->Covers(last_pos)) return false;
  }
  return true;
}

bool LiveRangeBuilder::NextIntervalStartsInDifferentBlocks(
    const UseInterval* interval) const {
  DCHECK_NOT_NULL(interval->next());
  LifetimePosition end = interval->end();
  LifetimePosition next_start = interval->next()->start();
  // The end position itself is not covered; step back to the last covered
  // position before comparing blocks.
  end = end.IsStart() ? end.PrevStart().End() : end.Start();
  int last_covered_index = end.ToInstructionIndex();
  const InstructionBlock* block =
      data()->code()->GetInstructionBlock(last_covered_index);
  const InstructionBlock* next_block =
      data()->code()->GetInstructionBlock(next_start.ToInstructionIndex());
  return block->rpo_number() < next_block->rpo_number();
}

RegisterAllocator::RegisterAllocator(RegisterAllocationData* data,
                                     RegisterKind kind)
    : data_(data),
      mode_(kind),
      num_registers_(GetRegisterCount(data->config(), kind)),
      num_allocatable_registers_(
          GetAllocatableRegisterCount(data->config(), kind)),
      allocatable_register_codes_(
          GetAllocatableRegisterCodes(data->config(), kind)),
      check_fp_aliasing_(false) {
  if (!kSimpleFPAliasing && kind == FP_REGISTERS) {
    check_fp_aliasing_ = (data->code()->representation_mask() &
                          (kFloat32Bit | kSimd128Bit)) != 0;
  }
}

LifetimePosition RegisterAllocator::GetSplitPositionForInstruction(
    const LiveRange* range, int instruction_index) {
  LifetimePosition ret = LifetimePosition::Invalid();

  ret = LifetimePosition::GapFromInstructionIndex(instruction_index);
  if (range->Start() >= ret || ret >= range->End()) {
    return LifetimePosition::Invalid();
  }
  return ret;
}

void RegisterAllocator::SplitAndSpillRangesDefinedByMemoryOperand() {
  size_t initial_range_count = data()->live_ranges().size();
  for (size_t i = 0; i < initial_range_count; ++i) {
    CHECK_EQ(initial_range_count, data()->live_ranges().size());
    TopLevelLiveRange* range = data()->live_ranges()[i];
    if (!CanProcessRange(range)) continue;
    if (range->HasNoSpillType() ||
        (range->HasSpillRange() && !range->has_slot_use())) {
      continue;
    }
    LifetimePosition start = range->Start();
    TRACE("Live range %d:%d is defined by a spill operand.\n",
          range->TopLevel()->vreg(), range->relative_id());
    LifetimePosition next_pos = start;
    if (next_pos.IsGapPosition()) {
      next_pos = next_pos.NextStart();
    }

    // With splinters, we can be more strict and skip over positions not
    // strictly needing registers.
    UsePosition* pos =
        range->IsSplinter()
            ? range->NextRegisterPosition(next_pos)
            : range->NextUsePositionRegisterIsBeneficial(next_pos);

    // If the range already has a spill operand and it doesn't need a
    // register immediately, split it and spill the first part of the range.
    if (pos == nullptr) {
      Spill(range);
    } else if (pos->pos() > range->Start().NextStart()) {
      // Do not spill the live range eagerly if the use position that could
      // benefit from the register is too close to the start of the range.
      LifetimePosition split_pos = GetSplitPositionForInstruction(
          range, pos->pos().ToInstructionIndex());
      // If there is no place to split, we can't split and spill.
      if (!split_pos.IsValid()) continue;

      split_pos =
          FindOptimalSplitPos(range->Start().NextFullStart(), split_pos);

      SplitRangeAt(range, split_pos);
      Spill(range);
    }
  }
}

LiveRange* RegisterAllocator::SplitRangeAt(LiveRange* range,
                                           LifetimePosition pos) {
  DCHECK(!range->TopLevel()->IsFixed());
  TRACE("Splitting live range %d:%d at %d\n", range->TopLevel()->vreg(),
        range->relative_id(), pos.value());

  if (pos <= range->Start()) return range;

  // We can't properly connect a live range that doesn't cross a basic block
  // boundary if the split occurs at the end of a block.
  DCHECK(pos.IsStart() || pos.IsGapPosition() ||
         (GetInstructionBlock(code(), pos)->last_instruction_index() !=
          pos.ToInstructionIndex()));

  LiveRange* result = range->SplitAt(pos, allocation_zone());
  return result;
}

LiveRange* RegisterAllocator::SplitBetween(LiveRange* range,
                                           LifetimePosition start,
                                           LifetimePosition end) {
  DCHECK(!range->TopLevel()->IsFixed());
  TRACE("Splitting live range %d:%d in position between [%d, %d]\n",
        range->TopLevel()->vreg(), range->relative_id(), start.value(),
        end.value());

  LifetimePosition split_pos = FindOptimalSplitPos(start, end);
  DCHECK(split_pos >= start);
  return SplitRangeAt(range, split_pos);
}

LifetimePosition RegisterAllocator::FindOptimalSplitPos(LifetimePosition start,
                                                        LifetimePosition end) {
  int start_instr = start.ToInstructionIndex();
  int end_instr = end.ToInstructionIndex();
  DCHECK_LE(start_instr, end_instr);

  // We have no choice.
  if (start_instr == end_instr) return end;

  const InstructionBlock* start_block = GetInstructionBlock(code(), start);
  const InstructionBlock* end_block = GetInstructionBlock(code(), end);

  if (end_block == start_block) {
    // The interval is split in the same basic block. Split at the latest
    // possible position.
    return end;
  }

  const InstructionBlock* block = end_block;
  // Find the header of the outermost loop.
  do {
    const InstructionBlock* loop = GetContainingLoop(code(), block);
    if (loop == nullptr ||
        loop->rpo_number().ToInt() <= start_block->rpo_number().ToInt()) {
      // No more loops, or the loop starts before the lifetime start.
      break;
    }
    block = loop;
  } while (true);

  // We did not find any suitable outer loop. Split at the latest possible
  // position unless end_block is a loop header itself.
  if (block == end_block && !end_block->IsLoopHeader()) return end;

  return LifetimePosition::GapFromInstructionIndex(
      block->first_instruction_index());
}

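// Example: if a range starts before a loop and its next use is inside the
// loop body, splitting at the loop header rather than immediately before the
// use keeps the resulting spill/fill moves out of the loop, so they run once
// instead of once per iteration.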
LifetimePosition RegisterAllocator::FindOptimalSpillingPos(
    LiveRange* range, LifetimePosition pos) {
  const InstructionBlock* block = GetInstructionBlock(code(), pos.Start());
  const InstructionBlock* loop_header =
      block->IsLoopHeader() ? block : GetContainingLoop(code(), block);

  if (loop_header == nullptr) return pos;

  const UsePosition* prev_use =
      range->PreviousUsePositionRegisterIsBeneficial(pos);

  while (loop_header != nullptr) {
    // We are going to spill the live range inside the loop. If possible, try
    // to move the spilling position backwards to the loop header; this
    // reduces the number of memory moves on the back edge.
    LifetimePosition loop_start = LifetimePosition::GapFromInstructionIndex(
        loop_header->first_instruction_index());

    if (range->Covers(loop_start)) {
      if (prev_use == nullptr || prev_use->pos() < loop_start) {
        // No register-beneficial use inside the loop before pos.
        pos = loop_start;
      }
    }

    // Try hoisting out to an outer loop.
    loop_header = GetContainingLoop(code(), loop_header);
  }

  return pos;
}

void RegisterAllocator::Spill(LiveRange* range) {
  DCHECK(!range->spilled());
  TopLevelLiveRange* first = range->TopLevel();
  TRACE("Spilling live range %d:%d\n", first->vreg(), range->relative_id());

  if (first->HasNoSpillType()) {
    data()->AssignSpillRangeToLiveRange(first);
  }
  range->Spill();
}

const char* RegisterAllocator::RegisterName(int register_code) const {
  return mode() == GENERAL_REGISTERS
             ? i::RegisterName(Register::from_code(register_code))
             : i::RegisterName(DoubleRegister::from_code(register_code));
}

LinearScanAllocator::LinearScanAllocator(RegisterAllocationData* data,
                                         RegisterKind kind, Zone* local_zone)
    : RegisterAllocator(data, kind),
      unhandled_live_ranges_(local_zone),
      active_live_ranges_(local_zone),
      inactive_live_ranges_(local_zone) {
  active_live_ranges().reserve(8);
  inactive_live_ranges().reserve(8);
  DCHECK_GE(RegisterConfiguration::kMaxFPRegisters,
            this->data()->config()->num_general_registers());
}

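// Main linear-scan loop, maintaining the classic three sets: "unhandled"
// (ordered by start position), "active" (ranges covering the current
// position), and "inactive" (ranges that started earlier but have a lifetime
// hole at the current position). Each iteration pops the next unhandled
// range, rebalances the other two sets, and tries to assign a register.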
void LinearScanAllocator::AllocateRegisters() {
  DCHECK(unhandled_live_ranges().empty());
  DCHECK(active_live_ranges().empty());
  DCHECK(inactive_live_ranges().empty());

  SplitAndSpillRangesDefinedByMemoryOperand();

  const size_t live_ranges_size = data()->live_ranges().size();
  for (TopLevelLiveRange* range : data()->live_ranges()) {
    CHECK_EQ(live_ranges_size, data()->live_ranges().size());
    if (!CanProcessRange(range)) continue;
    for (LiveRange* to_add = range; to_add != nullptr;
         to_add = to_add->next()) {
      if (!to_add->spilled()) {
        AddToUnhandled(to_add);
      }
    }
  }

  if (mode() == GENERAL_REGISTERS) {
    for (TopLevelLiveRange* current : data()->fixed_live_ranges()) {
      if (current != nullptr) AddToInactive(current);
    }
  } else {
    for (TopLevelLiveRange* current : data()->fixed_double_live_ranges()) {
      if (current != nullptr) AddToInactive(current);
    }
    if (!kSimpleFPAliasing && check_fp_aliasing()) {
      for (TopLevelLiveRange* current : data()->fixed_float_live_ranges()) {
        if (current != nullptr) AddToInactive(current);
      }
      for (TopLevelLiveRange* current : data()->fixed_simd128_live_ranges()) {
        if (current != nullptr) AddToInactive(current);
      }
    }
  }

  while (!unhandled_live_ranges().empty()) {
    LiveRange* current = *unhandled_live_ranges().begin();
    unhandled_live_ranges().erase(unhandled_live_ranges().begin());
    LifetimePosition position = current->Start();
#ifdef DEBUG
    allocation_finger_ = position;
#endif
    TRACE("Processing interval %d:%d start=%d\n", current->TopLevel()->vreg(),
          current->relative_id(), position.value());

    if (current->IsTopLevel() && TryReuseSpillForPhi(current->TopLevel()))
      continue;

    for (auto it = active_live_ranges().begin();
         it != active_live_ranges().end();) {
      LiveRange* cur_active = *it;
      if (cur_active->End() <= position) {
        it = ActiveToHandled(it);
      } else if (!cur_active->Covers(position)) {
        it = ActiveToInactive(it);
      } else {
        ++it;
      }
    }

    for (auto it = inactive_live_ranges().begin();
         it != inactive_live_ranges().end();) {
      LiveRange* cur_inactive = *it;
      if (cur_inactive->End() <= position) {
        it = InactiveToHandled(it);
      } else if (cur_inactive->Covers(position)) {
        it = InactiveToActive(it);
      } else {
        ++it;
      }
    }

    DCHECK(!current->HasRegisterAssigned() && !current->spilled());

    ProcessCurrentRange(current);
  }

  if (FLAG_trace_alloc) {
    PrintRangeOverview(std::cout);
  }
}

bool LinearScanAllocator::TrySplitAndSpillSplinter(LiveRange* range) {
  DCHECK(range->TopLevel()->IsSplinter());
  // If we can spill the whole range, great. Otherwise, split above the first
  // use needing a register and spill the top part.
  const UsePosition* next_reg = range->NextRegisterPosition(range->Start());
  if (next_reg == nullptr) {
    // Spill the whole range.
    Spill(range);
    return true;
  } else if (range->FirstHintPosition() == nullptr) {
    // There is no hint, but we have a use position requiring a register:
    // fall back to the regular allocation path.
    return false;
  } else if (next_reg->pos().PrevStart() > range->Start()) {
    LiveRange* tail = SplitRangeAt(range, next_reg->pos().PrevStart());
    AddToUnhandled(tail);
    // Spill the top part.
    Spill(range);
    return true;
  }
  return false;
}

void LinearScanAllocator::SetLiveRangeAssignedRegister(LiveRange* range,
                                                       int reg) {
  data()->MarkAllocated(range->representation(), reg);
  range->set_assigned_register(reg);
  range->SetUseHints(reg);
  if (range->IsTopLevel() && range->TopLevel()->is_phi()) {
    data()->GetPhiMapValueFor(range->TopLevel())->set_assigned_register(reg);
  }
}

void LinearScanAllocator::AddToActive(LiveRange* range) {
  TRACE("Add live range %d:%d to active\n", range->TopLevel()->vreg(),
        range->relative_id());
  active_live_ranges().push_back(range);
}

void LinearScanAllocator::AddToInactive(LiveRange* range) {
  TRACE("Add live range %d:%d to inactive\n", range->TopLevel()->vreg(),
        range->relative_id());
  inactive_live_ranges().push_back(range);
}

void LinearScanAllocator::AddToUnhandled(LiveRange* range) {
  if (range == nullptr || range->IsEmpty()) return;
  DCHECK(!range->HasRegisterAssigned() && !range->spilled());
  DCHECK(allocation_finger_ <= range->Start());

  TRACE("Add live range %d:%d to unhandled\n", range->TopLevel()->vreg(),
        range->relative_id());
  unhandled_live_ranges().insert(range);
}

ZoneVector<LiveRange*>::iterator LinearScanAllocator::ActiveToHandled(
    const ZoneVector<LiveRange*>::iterator it) {
  TRACE("Moving live range %d:%d from active to handled\n",
        (*it)->TopLevel()->vreg(), (*it)->relative_id());
  return active_live_ranges().erase(it);
}

ZoneVector<LiveRange*>::iterator LinearScanAllocator::ActiveToInactive(
    const ZoneVector<LiveRange*>::iterator it) {
  LiveRange* range = *it;
  inactive_live_ranges().push_back(range);
  TRACE("Moving live range %d:%d from active to inactive\n",
        range->TopLevel()->vreg(), range->relative_id());
  return active_live_ranges().erase(it);
}

ZoneVector<LiveRange*>::iterator LinearScanAllocator::InactiveToHandled(
    ZoneVector<LiveRange*>::iterator it) {
  TRACE("Moving live range %d:%d from inactive to handled\n",
        (*it)->TopLevel()->vreg(), (*it)->relative_id());
  return inactive_live_ranges().erase(it);
}

ZoneVector<LiveRange*>::iterator LinearScanAllocator::InactiveToActive(
    ZoneVector<LiveRange*>::iterator it) {
  LiveRange* range = *it;
  active_live_ranges().push_back(range);
  TRACE("Moving live range %d:%d from inactive to active\n",
        range->TopLevel()->vreg(), range->relative_id());
  return inactive_live_ranges().erase(it);
}

void LinearScanAllocator::GetFPRegisterSet(MachineRepresentation rep,
                                           int* num_regs, int* num_codes,
                                           const int** codes) const {
  DCHECK(!kSimpleFPAliasing);
  if (rep == MachineRepresentation::kFloat32) {
    *num_regs = data()->config()->num_float_registers();
    *num_codes = data()->config()->num_allocatable_float_registers();
    *codes = data()->config()->allocatable_float_codes();
  } else if (rep == MachineRepresentation::kSimd128) {
    *num_regs = data()->config()->num_simd128_registers();
    *num_codes = data()->config()->num_allocatable_simd128_registers();
    *codes = data()->config()->allocatable_simd128_codes();
  } else {
    UNREACHABLE();
  }
}

void LinearScanAllocator::FindFreeRegistersForRange(
    LiveRange* range, Vector<LifetimePosition> positions) {
  int num_regs = num_registers();
  int num_codes = num_allocatable_registers();
  const int* codes = allocatable_register_codes();
  MachineRepresentation rep = range->representation();
  if (!kSimpleFPAliasing && (rep == MachineRepresentation::kFloat32 ||
                             rep == MachineRepresentation::kSimd128))
    GetFPRegisterSet(rep, &num_regs, &num_codes, &codes);
  DCHECK_GE(positions.length(), num_regs);

  for (int i = 0; i < num_regs; ++i) {
    positions[i] = LifetimePosition::MaxPosition();
  }

  for (LiveRange* cur_active : active_live_ranges()) {
    int cur_reg = cur_active->assigned_register();
    if (kSimpleFPAliasing || !check_fp_aliasing()) {
      positions[cur_reg] = LifetimePosition::GapFromInstructionIndex(0);
      TRACE("Register %s is free until pos %d (1)\n", RegisterName(cur_reg),
            LifetimePosition::GapFromInstructionIndex(0).value());
    } else {
      int alias_base_index = -1;
      int aliases = data()->config()->GetAliases(
          cur_active->representation(), cur_reg, rep, &alias_base_index);
      DCHECK(aliases > 0 || (aliases == 0 && alias_base_index == -1));
      while (aliases--) {
        int aliased_reg = alias_base_index + aliases;
        positions[aliased_reg] = LifetimePosition::GapFromInstructionIndex(0);
      }
    }
  }

  for (LiveRange* cur_inactive : inactive_live_ranges()) {
    DCHECK(cur_inactive->End() > range->Start());
    int cur_reg = cur_inactive->assigned_register();
    // Skip the costly intersection when this register cannot be interesting
    // to the range anyway.
    if ((kSimpleFPAliasing || !check_fp_aliasing()) &&
        positions[cur_reg] < range->Start()) {
      continue;
    }
    LifetimePosition next_intersection = cur_inactive->FirstIntersection(range);
    if (!next_intersection.IsValid()) continue;
    if (kSimpleFPAliasing || !check_fp_aliasing()) {
      positions[cur_reg] = Min(positions[cur_reg], next_intersection);
      TRACE("Register %s is free until pos %d (2)\n", RegisterName(cur_reg),
            Min(positions[cur_reg], next_intersection).value());
    } else {
      int alias_base_index = -1;
      int aliases = data()->config()->GetAliases(
          cur_inactive->representation(), cur_reg, rep, &alias_base_index);
      DCHECK(aliases > 0 || (aliases == 0 && alias_base_index == -1));
      while (aliases--) {
        int aliased_reg = alias_base_index + aliases;
        positions[aliased_reg] = Min(positions[aliased_reg], next_intersection);
      }
    }
  }
}

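// Allocation strategy for a single range, tried in order: (1) the hinted
// "preferred" register, (2) any register free long enough (splitting the
// range if the register is only free for a prefix), and (3) evicting or
// splitting conflicting ranges in AllocateBlockedReg. Whatever ends up with
// a register joins the active set.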
void LinearScanAllocator::ProcessCurrentRange(LiveRange* current) {
  LifetimePosition free_until_pos_buff[RegisterConfiguration::kMaxFPRegisters];
  Vector<LifetimePosition> free_until_pos(
      free_until_pos_buff, RegisterConfiguration::kMaxFPRegisters);
  FindFreeRegistersForRange(current, free_until_pos);
  if (!TryAllocatePreferredReg(current, free_until_pos)) {
    if (current->TopLevel()->IsSplinter()) {
      if (TrySplitAndSpillSplinter(current)) return;
    }
    if (!TryAllocateFreeReg(current, free_until_pos)) {
      AllocateBlockedReg(current);
    }
  }
  if (current->HasRegisterAssigned()) {
    AddToActive(current);
  }
}

bool LinearScanAllocator::TryAllocatePreferredReg(
    LiveRange* current, const Vector<LifetimePosition>& free_until_pos) {
  int hint_register;
  if (current->FirstHintPosition(&hint_register) != nullptr) {
    TRACE(
        "Found reg hint %s (free until [%d) for live range %d:%d (end %d[).\n",
        RegisterName(hint_register), free_until_pos[hint_register].value(),
        current->TopLevel()->vreg(), current->relative_id(),
        current->End().value());

    // The desired register is free until the end of the current live range.
    if (free_until_pos[hint_register] >= current->End()) {
      TRACE("Assigning preferred reg %s to live range %d:%d\n",
            RegisterName(hint_register), current->TopLevel()->vreg(),
            current->relative_id());
      SetLiveRangeAssignedRegister(current, hint_register);
      return true;
    }
  }
  return false;
}

bool LinearScanAllocator::TryAllocateFreeReg(
    LiveRange* current, const Vector<LifetimePosition>& free_until_pos) {
  int num_regs = 0;  // Only used for the call to GetFPRegisterSet.
  int num_codes = num_allocatable_registers();
  const int* codes = allocatable_register_codes();
  MachineRepresentation rep = current->representation();
  if (!kSimpleFPAliasing && (rep == MachineRepresentation::kFloat32 ||
                             rep == MachineRepresentation::kSimd128)) {
    GetFPRegisterSet(rep, &num_regs, &num_codes, &codes);
  }

  DCHECK_GE(free_until_pos.length(), num_codes);

  // Find the register which stays free for the longest time, starting from
  // the hinted register if one exists.
  int reg;
  if (current->FirstHintPosition(&reg) == nullptr) {
    reg = codes[0];
  }
  for (int i = 0; i < num_codes; ++i) {
    int code = codes[i];
    if (free_until_pos[code].ToInstructionIndex() >
        free_until_pos[reg].ToInstructionIndex()) {
      reg = code;
    }
  }

  LifetimePosition pos = free_until_pos[reg];

  if (pos <= current->Start()) {
    // All registers are blocked.
    return false;
  }

  if (pos < current->End()) {
    // Register reg is available at the range start but becomes blocked before
    // the range end. Split current at the position where it becomes blocked.
    LiveRange* tail = SplitRangeAt(current, pos);
    AddToUnhandled(tail);

    // Try to allocate the preferred register once more.
    if (TryAllocatePreferredReg(current, free_until_pos)) return true;
  }

  // Register reg is available at the range start and is free until the range
  // end.
  DCHECK(pos >= current->End());
  TRACE("Assigning free reg %s to live range %d:%d\n", RegisterName(reg),
        current->TopLevel()->vreg(), current->relative_id());
  SetLiveRangeAssignedRegister(current, reg);

  return true;
}

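// No register is free for the whole range, so pick the register whose next
// use by other ranges is farthest away: use_pos[r] is the next position at
// which r is wanted by a spillable range, block_pos[r] the position at which
// r is hard-blocked by a fixed or unspillable range. If even the best
// candidate is needed before current's first register use, it is cheaper to
// spill current itself.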
void LinearScanAllocator::AllocateBlockedReg(LiveRange* current) {
  UsePosition* register_use = current->NextRegisterPosition(current->Start());
  if (register_use == nullptr) {
    // There is no use in the current live range that requires a register.
    // We can just spill it.
    Spill(current);
    return;
  }

  int num_regs = num_registers();
  int num_codes = num_allocatable_registers();
  const int* codes = allocatable_register_codes();
  MachineRepresentation rep = current->representation();
  if (!kSimpleFPAliasing && (rep == MachineRepresentation::kFloat32 ||
                             rep == MachineRepresentation::kSimd128))
    GetFPRegisterSet(rep, &num_regs, &num_codes, &codes);

  // use_pos keeps track of positions a register/alias is used at.
  // block_pos keeps track of positions where a register/alias is blocked
  // from.
  LifetimePosition use_pos[RegisterConfiguration::kMaxFPRegisters];
  LifetimePosition block_pos[RegisterConfiguration::kMaxFPRegisters];
  for (int i = 0; i < num_regs; i++) {
    use_pos[i] = block_pos[i] = LifetimePosition::MaxPosition();
  }

  for (LiveRange* range : active_live_ranges()) {
    int cur_reg = range->assigned_register();
    bool is_fixed_or_cant_spill =
        range->TopLevel()->IsFixed() || !range->CanBeSpilled(current->Start());
    if (kSimpleFPAliasing || !check_fp_aliasing()) {
      if (is_fixed_or_cant_spill) {
        block_pos[cur_reg] = use_pos[cur_reg] =
            LifetimePosition::GapFromInstructionIndex(0);
      } else {
        DCHECK_NE(LifetimePosition::GapFromInstructionIndex(0),
                  block_pos[cur_reg]);
        use_pos[cur_reg] =
            range->NextLifetimePositionRegisterIsBeneficial(current->Start());
      }
    } else {
      int alias_base_index = -1;
      int aliases = data()->config()->GetAliases(
          range->representation(), cur_reg, rep, &alias_base_index);
      DCHECK(aliases > 0 || (aliases == 0 && alias_base_index == -1));
      while (aliases--) {
        int aliased_reg = alias_base_index + aliases;
        if (is_fixed_or_cant_spill) {
          block_pos[aliased_reg] = use_pos[aliased_reg] =
              LifetimePosition::GapFromInstructionIndex(0);
        } else {
          use_pos[aliased_reg] =
              Min(block_pos[aliased_reg],
                  range->NextLifetimePositionRegisterIsBeneficial(
                      current->Start()));
        }
      }
    }
  }

  for (LiveRange* range : inactive_live_ranges()) {
    DCHECK(range->End() > current->Start());
    int cur_reg = range->assigned_register();
    bool is_fixed = range->TopLevel()->IsFixed();

    // Don't perform costly intersections if they are guaranteed to not
    // update block_pos or use_pos.
    if ((kSimpleFPAliasing || !check_fp_aliasing())) {
      if (is_fixed) {
        if (block_pos[cur_reg] < range->Start()) continue;
      } else {
        if (use_pos[cur_reg] < range->Start()) continue;
      }
    }

    LifetimePosition next_intersection = range->FirstIntersection(current);
    if (!next_intersection.IsValid()) continue;

    if (kSimpleFPAliasing || !check_fp_aliasing()) {
      if (is_fixed) {
        block_pos[cur_reg] = Min(block_pos[cur_reg], next_intersection);
        use_pos[cur_reg] = Min(block_pos[cur_reg], use_pos[cur_reg]);
      } else {
        use_pos[cur_reg] = Min(use_pos[cur_reg], next_intersection);
      }
    } else {
      int alias_base_index = -1;
      int aliases = data()->config()->GetAliases(
          range->representation(), cur_reg, rep, &alias_base_index);
      DCHECK(aliases > 0 || (aliases == 0 && alias_base_index == -1));
      while (aliases--) {
        int aliased_reg = alias_base_index + aliases;
        if (is_fixed) {
          block_pos[aliased_reg] =
              Min(block_pos[aliased_reg], next_intersection);
          use_pos[aliased_reg] =
              Min(block_pos[aliased_reg], use_pos[aliased_reg]);
        } else {
          use_pos[aliased_reg] = Min(use_pos[aliased_reg], next_intersection);
        }
      }
    }
  }

  int reg = codes[0];
  for (int i = 1; i < num_codes; ++i) {
    int code = codes[i];
    if (use_pos[code] > use_pos[reg]) {
      reg = code;
    }
  }

  if (use_pos[reg] < register_use->pos()) {
    // If there is a gap position before the next register use, we can spill
    // until there. The gap position will then fit the fill move.
    if (LifetimePosition::ExistsGapPositionBetween(current->Start(),
                                                   register_use->pos())) {
      SpillBetween(current, current->Start(), register_use->pos());
      return;
    }
  }

  if (block_pos[reg] < current->End()) {
    // Register becomes blocked before the current range end. Split before
    // that position.
    LiveRange* tail =
        SplitBetween(current, current->Start(), block_pos[reg].Start());
    AddToUnhandled(tail);
  }

  // Register reg is not blocked for the whole range.
  DCHECK(block_pos[reg] >= current->End());
  TRACE("Assigning blocked reg %s to live range %d:%d\n", RegisterName(reg),
        current->TopLevel()->vreg(), current->relative_id());
  SetLiveRangeAssignedRegister(current, reg);

  // This register was not free. We need to find and spill the parts of
  // active and inactive live ranges that use the same register at the same
  // lifetime positions as current.
  SplitAndSpillIntersecting(current);
}

void LinearScanAllocator::SplitAndSpillIntersecting(LiveRange* current) {
  DCHECK(current->HasRegisterAssigned());
  int reg = current->assigned_register();
  LifetimePosition split_pos = current->Start();
  for (auto it = active_live_ranges().begin();
       it != active_live_ranges().end();) {
    LiveRange* range = *it;
    if (kSimpleFPAliasing || !check_fp_aliasing()) {
      if (range->assigned_register() != reg) {
        ++it;
        continue;
      }
    } else {
      if (!data()->config()->AreAliases(current->representation(), reg,
                                        range->representation(),
                                        range->assigned_register())) {
        ++it;
        continue;
      }
    }

    UsePosition* next_pos = range->NextRegisterPosition(current->Start());
    LifetimePosition spill_pos = FindOptimalSpillingPos(range, split_pos);
    if (next_pos == nullptr) {
      SpillAfter(range, spill_pos);
    } else {
      // Keep the range spilled at least until the start of current;
      // otherwise we would create new unhandled ranges that start before
      // current, violating allocation-order invariants.
      DCHECK(LifetimePosition::ExistsGapPositionBetween(current->Start(),
                                                        next_pos->pos()));
      SpillBetweenUntil(range, spill_pos, current->Start(), next_pos->pos());
    }
    it = ActiveToHandled(it);
  }

  for (auto it = inactive_live_ranges().begin();
       it != inactive_live_ranges().end();) {
    LiveRange* range = *it;
    DCHECK(range->End() > current->Start());
    if (range->TopLevel()->IsFixed()) {
      ++it;
      continue;
    }
    if (kSimpleFPAliasing || !check_fp_aliasing()) {
      if (range->assigned_register() != reg) {
        ++it;
        continue;
      }
    } else {
      if (!data()->config()->AreAliases(current->representation(), reg,
                                        range->representation(),
                                        range->assigned_register())) {
        ++it;
        continue;
      }
    }

    LifetimePosition next_intersection = range->FirstIntersection(current);
    if (next_intersection.IsValid()) {
      UsePosition* next_pos = range->NextRegisterPosition(current->Start());
      if (next_pos == nullptr) {
        SpillAfter(range, split_pos);
      } else {
        next_intersection = Min(next_intersection, next_pos->pos());
        SpillBetween(range, split_pos, next_intersection);
      }
      it = InactiveToHandled(it);
    } else {
      ++it;
    }
  }
}

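// Heuristic: if most of a phi's inputs are already spilled and their spill
// ranges can be merged, the phi result can reuse the shared slot instead of
// competing for a register it would likely lose right away.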
bool LinearScanAllocator::TryReuseSpillForPhi(TopLevelLiveRange* range) {
  if (!range->is_phi()) return false;

  DCHECK(!range->HasSpillOperand());
  RegisterAllocationData::PhiMapValue* phi_map_value =
      data()->GetPhiMapValueFor(range);
  const PhiInstruction* phi = phi_map_value->phi();
  const InstructionBlock* block = phi_map_value->block();
  // Count the number of spilled operands.
  size_t spilled_count = 0;
  LiveRange* first_op = nullptr;
  for (size_t i = 0; i < phi->operands().size(); i++) {
    int op = phi->operands()[i];
    LiveRange* op_range = data()->GetOrCreateLiveRangeFor(op);
    if (!op_range->TopLevel()->HasSpillRange()) continue;
    const InstructionBlock* pred =
        code()->InstructionBlockAt(block->predecessors()[i]);
    LifetimePosition pred_end =
        LifetimePosition::InstructionFromInstructionIndex(
            pred->last_instruction_index());
    while (op_range != nullptr && !op_range->CanCover(pred_end)) {
      op_range = op_range->next();
    }
    if (op_range != nullptr && op_range->spilled()) {
      spilled_count++;
      if (first_op == nullptr) {
        first_op = op_range->TopLevel();
      }
    }
  }

  // Only continue if more than half of the operands are spilled.
  if (spilled_count * 2 <= phi->operands().size()) {
    return false;
  }

  // Try to merge the spilled operands and count the number of merged spilled
  // operands.
  DCHECK_NOT_NULL(first_op);
  SpillRange* first_op_spill = first_op->TopLevel()->GetSpillRange();
  size_t num_merged = 1;
  for (size_t i = 1; i < phi->operands().size(); i++) {
    int op = phi->operands()[i];
    TopLevelLiveRange* op_range = data()->live_ranges()[op];
    if (!op_range->HasSpillRange()) continue;
    SpillRange* op_spill = op_range->GetSpillRange();
    if (op_spill == first_op_spill || first_op_spill->TryMerge(op_spill)) {
      num_merged++;
    }
  }

  // Only continue if enough operands could be merged to the same spill slot.
  if (num_merged * 2 <= phi->operands().size() ||
      AreUseIntervalsIntersecting(first_op_spill->interval(),
                                  range->first_interval())) {
    return false;
  }

  // If the range does not need a register soon, spill it to the merged
  // spill range.
  LifetimePosition next_pos = range->Start();
  if (next_pos.IsGapPosition()) next_pos = next_pos.NextStart();
  UsePosition* pos = range->NextUsePositionRegisterIsBeneficial(next_pos);
  if (pos == nullptr) {
    SpillRange* spill_range =
        range->TopLevel()->HasSpillRange()
            ? range->TopLevel()->GetSpillRange()
            : data()->AssignSpillRangeToLiveRange(range->TopLevel());
    bool merged = first_op_spill->TryMerge(spill_range);
    if (!merged) return false;
    range->Spill();
    return true;
  } else if (pos->pos() > range->Start().NextStart()) {
    SpillRange* spill_range =
        range->TopLevel()->HasSpillRange()
            ? range->TopLevel()->GetSpillRange()
            : data()->AssignSpillRangeToLiveRange(range->TopLevel());
    bool merged = first_op_spill->TryMerge(spill_range);
    if (!merged) return false;
    SpillBetween(range, range->Start(), pos->pos());
    return true;
  }
  return false;
}

void LinearScanAllocator::SpillAfter(LiveRange* range, LifetimePosition pos) {
  LiveRange* second_part = SplitRangeAt(range, pos);
  Spill(second_part);
}

void LinearScanAllocator::SpillBetween(LiveRange* range, LifetimePosition start,
                                       LifetimePosition end) {
  SpillBetweenUntil(range, start, start, end);
}

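// SpillBetweenUntil splits a range up to three ways. Roughly:
//   [ first | spilled middle | rest )
// The first part keeps its register until `start`; the middle part, from
// max(start, until) to just before `end`, lives in the spill slot; the rest
// is returned to the unhandled set to be allocated again.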
void LinearScanAllocator::SpillBetweenUntil(LiveRange* range,
                                            LifetimePosition start,
                                            LifetimePosition until,
                                            LifetimePosition end) {
  LiveRange* second_part = SplitRangeAt(range, start);

  if (second_part->Start() < end) {
    // The split result intersects with [start, end[. Split it at a position
    // between ]start+1, end[, spill the middle part and put the rest back to
    // unhandled.
    LifetimePosition third_part_end = end.PrevStart().End();
    if (data()->IsBlockBoundary(end.Start())) {
      third_part_end = end.Start();
    }
    LiveRange* third_part = SplitBetween(
        second_part, Max(second_part->Start().End(), until), third_part_end);

    DCHECK(third_part != second_part);

    Spill(second_part);
    AddToUnhandled(third_part);
  } else {
    // The split result does not intersect with [start, end[. Nothing to
    // spill; put this range back to unhandled.
    AddToUnhandled(second_part);
  }
}

SpillSlotLocator::SpillSlotLocator(RegisterAllocationData* data)
    : data_(data) {}

void SpillSlotLocator::LocateSpillSlots() {
  const InstructionSequence* code = data()->code();
  const size_t live_ranges_size = data()->live_ranges().size();
  for (TopLevelLiveRange* range : data()->live_ranges()) {
    CHECK_EQ(live_ranges_size, data()->live_ranges().size());
    if (range == nullptr || range->IsEmpty()) continue;
    // We care only about ranges which spill in the frame.
    if (!range->HasSpillRange() || range->IsSpilledOnlyInDeferredBlocks()) {
      continue;
    }
    TopLevelLiveRange::SpillMoveInsertionList* spills =
        range->GetSpillMoveInsertionLocations();
    DCHECK_NOT_NULL(spills);
    for (; spills != nullptr; spills = spills->next) {
      code->GetInstructionBlock(spills->gap_index)->mark_needs_frame();
    }
  }
}

OperandAssigner::OperandAssigner(RegisterAllocationData* data) : data_(data) {}

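// Spill slot assignment happens in two passes: first greedily merge spill
// ranges whose use intervals do not intersect, so several values can share
// one slot, then allocate a frame slot for each surviving merged range.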
void OperandAssigner::AssignSpillSlots() {
  ZoneVector<SpillRange*>& spill_ranges = data()->spill_ranges();
  // Merge disjoint spill ranges.
  for (size_t i = 0; i < spill_ranges.size(); ++i) {
    SpillRange* range = spill_ranges[i];
    if (range == nullptr) continue;
    if (range->IsEmpty()) continue;
    for (size_t j = i + 1; j < spill_ranges.size(); ++j) {
      SpillRange* other = spill_ranges[j];
      if (other != nullptr && !other->IsEmpty()) {
        range->TryMerge(other);
      }
    }
  }
  // Allocate slots for the merged spill ranges.
  for (SpillRange* range : spill_ranges) {
    if (range == nullptr || range->IsEmpty()) continue;
    if (!range->HasSlot()) {
      int index = data()->frame()->AllocateSpillSlot(range->byte_width());
      range->set_assigned_slot(index);
    }
  }
}

void OperandAssigner::CommitAssignment() {
  const size_t live_ranges_size = data()->live_ranges().size();
  for (TopLevelLiveRange* top_range : data()->live_ranges()) {
    CHECK_EQ(live_ranges_size, data()->live_ranges().size());
    if (top_range == nullptr || top_range->IsEmpty()) continue;
    InstructionOperand spill_operand;
    if (top_range->HasSpillOperand()) {
      spill_operand = *top_range->TopLevel()->GetSpillOperand();
    } else if (top_range->TopLevel()->HasSpillRange()) {
      spill_operand = top_range->TopLevel()->GetSpillRangeOperand();
    }
    if (top_range->is_phi()) {
      data()->GetPhiMapValueFor(top_range)->CommitAssignment(
          top_range->GetAssignedOperand());
    }
    for (LiveRange* range = top_range; range != nullptr;
         range = range->next()) {
      InstructionOperand assigned = range->GetAssignedOperand();
      DCHECK(!assigned.IsUnallocated());
      range->ConvertUsesToOperand(assigned, spill_operand);
    }

    if (!spill_operand.IsInvalid()) {
      // If this top-level range has a child spilled in a deferred block, the
      // spills are inserted later by the range connection and control flow
      // resolution phases rather than at definition.
      if (!top_range->IsSpilledOnlyInDeferredBlocks()) {
        // Spill at definition if the range isn't spilled only in deferred
        // blocks.
        top_range->CommitSpillMoves(
            data()->code(), spill_operand,
            top_range->has_slot_use() || top_range->spilled());
      }
    }
  }
}

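// The reference-map phase records, for every GC safe point, where each
// tagged value lives (register or spill slot) so the garbage collector can
// find and update the pointers it contains.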
ReferenceMapPopulator::ReferenceMapPopulator(RegisterAllocationData* data)
    : data_(data) {}

bool ReferenceMapPopulator::SafePointsAreInOrder() const {
  int safe_point = 0;
  for (ReferenceMap* map : *data()->code()->reference_maps()) {
    if (safe_point > map->instruction_position()) return false;
    safe_point = map->instruction_position();
  }
  return true;
}

void ReferenceMapPopulator::PopulateReferenceMaps() {
  DCHECK(SafePointsAreInOrder());
  // Map all delayed references.
  for (RegisterAllocationData::DelayedReference& delayed_reference :
       data()->delayed_references()) {
    delayed_reference.map->RecordReference(
        AllocatedOperand::cast(*delayed_reference.operand));
  }
  // Iterate over all safe point positions and record a pointer for all
  // spilled live ranges at this point.
  int last_range_start = 0;
  const ReferenceMapDeque* reference_maps = data()->code()->reference_maps();
  ReferenceMapDeque::const_iterator first_it = reference_maps->begin();
  const size_t live_ranges_size = data()->live_ranges().size();
  for (TopLevelLiveRange* range : data()->live_ranges()) {
    CHECK_EQ(live_ranges_size, data()->live_ranges().size());
    if (range == nullptr) continue;
    // Skip non-reference values.
    if (!data()->IsReference(range)) continue;
    // Skip empty live ranges.
    if (range->IsEmpty()) continue;
    if (range->has_preassigned_slot()) continue;

    // Find the extent of the range and its children.
    int start = range->Start().ToInstructionIndex();
    int end = 0;
    for (LiveRange* cur = range; cur != nullptr; cur = cur->next()) {
      LifetimePosition this_end = cur->End();
      if (this_end.ToInstructionIndex() > end)
        end = this_end.ToInstructionIndex();
      DCHECK(cur->Start().ToInstructionIndex() >= start);
    }

    // Most of the ranges are in order, but not all. Keep an eye on when they
    // step backwards and reset first_it so we don't miss any safe points.
    if (start < last_range_start) first_it = reference_maps->begin();
    last_range_start = start;

    // Step across the safe points that are before the start of this range,
    // recording how far we stepped to save doing this for the next range.
    for (; first_it != reference_maps->end(); ++first_it) {
      ReferenceMap* map = *first_it;
      if (map->instruction_position() >= start) break;
    }

    InstructionOperand spill_operand;
    if (((range->HasSpillOperand() &&
          !range->GetSpillOperand()->IsConstant()) ||
         range->HasSpillRange())) {
      if (range->HasSpillOperand()) {
        spill_operand = *range->GetSpillOperand();
      } else {
        spill_operand = range->GetSpillRangeOperand();
      }
      DCHECK(spill_operand.IsStackSlot());
      DCHECK(CanBeTaggedPointer(
          AllocatedOperand::cast(spill_operand).representation()));
    }

    LiveRange* cur = range;
    // Step through the safe points to see whether they are in the range.
    for (auto it = first_it; it != reference_maps->end(); ++it) {
      ReferenceMap* map = *it;
      int safe_point = map->instruction_position();

      // The safe points are sorted so we can stop searching here.
      if (safe_point - 1 > end) break;

      // Advance to the next active range that covers the current safe point
      // position.
      LifetimePosition safe_point_pos =
          LifetimePosition::InstructionFromInstructionIndex(safe_point);

      // Search for the child range (cur) that covers safe_point_pos.
      DCHECK_NOT_NULL(cur);
      DCHECK(safe_point_pos >= cur->Start() || range == cur);
      bool found = false;
      while (!found) {
        if (cur->Covers(safe_point_pos)) {
          found = true;
        } else {
          LiveRange* next = cur->next();
          if (next == nullptr || next->Start() > safe_point_pos) {
            break;
          }
          cur = next;
        }
      }
      if (!found) continue;

      // Check if the live range is spilled and the safe point is after the
      // spill position.
      int spill_index = range->IsSpilledOnlyInDeferredBlocks()
                            ? cur->Start().ToInstructionIndex()
                            : range->spill_start_index();

      if (!spill_operand.IsInvalid() && safe_point >= spill_index) {
        TRACE("Pointer for range %d (spilled at %d) at safe point %d\n",
              range->vreg(), spill_index, safe_point);
        map->RecordReference(AllocatedOperand::cast(spill_operand));
      }

      if (!cur->spilled()) {
        TRACE(
            "Pointer in register for range %d:%d (start at %d) "
            "at safe point %d\n",
            range->vreg(), cur->relative_id(), cur->Start().value(),
            safe_point);
        InstructionOperand operand = cur->GetAssignedOperand();
        DCHECK(!operand.IsStackSlot());
        DCHECK(CanBeTaggedPointer(
            AllocatedOperand::cast(operand).representation()));
        map->RecordReference(AllocatedOperand::cast(operand));
      }
    }
  }
}

LiveRangeConnector::LiveRangeConnector(RegisterAllocationData* data)
    : data_(data) {}

bool LiveRangeConnector::CanEagerlyResolveControlFlow(
    const InstructionBlock* block) const {
  if (block->PredecessorCount() != 1) return false;
  return block->predecessors()[0].IsNext(block->rpo_number());
}

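// After allocation, a value may end a predecessor block in one location and
// start the current block in another (say, a register on one path and a
// stack slot on another). ResolveControlFlow inserts the gap moves that
// reconcile the two; the single-predecessor fall-through case is skipped
// here because ConnectRanges already handles contiguous ranges.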
void LiveRangeConnector::ResolveControlFlow(Zone* local_zone) {
  // Lazily linearize live ranges in memory for fast lookup.
  LiveRangeFinder finder(data(), local_zone);
  ZoneVector<BitVector*>& live_in_sets = data()->live_in_sets();
  for (const InstructionBlock* block : code()->instruction_blocks()) {
    if (CanEagerlyResolveControlFlow(block)) continue;
    BitVector* live = live_in_sets[block->rpo_number().ToInt()];
    BitVector::Iterator iterator(live);
    while (!iterator.Done()) {
      int vreg = iterator.Current();
      LiveRangeBoundArray* array = finder.ArrayFor(vreg);
      for (const RpoNumber& pred : block->predecessors()) {
        FindResult result;
        const InstructionBlock* pred_block = code()->InstructionBlockAt(pred);
        if (!array->FindConnectableSubranges(block, pred_block, &result)) {
          continue;
        }
        InstructionOperand pred_op = result.pred_cover_->GetAssignedOperand();
        InstructionOperand cur_op = result.cur_cover_->GetAssignedOperand();
        if (pred_op.Equals(cur_op)) continue;
        if (!pred_op.IsAnyRegister() && cur_op.IsAnyRegister()) {
          // We're doing a reload. We don't need to, if:
          // 1) there's no register use in this block, and
          // 2) the range ends before the block does, and
          // 3) we don't have a successor, or the successor is spilled.
          LifetimePosition block_start =
              LifetimePosition::GapFromInstructionIndex(block->code_start());
          LifetimePosition block_end =
              LifetimePosition::GapFromInstructionIndex(block->code_end());
          const LiveRange* current = result.cur_cover_;
          const LiveRange* successor = current->next();
          if (current->End() < block_end &&
              (successor == nullptr || successor->spilled())) {
            // Verify point 1: no register use. We can go to the end of the
            // range, since it's all within the block.
            bool uses_reg = false;
            for (const UsePosition* use = current->NextUsePosition(block_start);
                 use != nullptr; use = use->next()) {
              if (use->operand()->IsAnyRegister()) {
                uses_reg = true;
                break;
              }
            }
            if (!uses_reg) continue;
          }
          if (current->TopLevel()->IsSpilledOnlyInDeferredBlocks() &&
              pred_block->IsDeferred()) {
            // The spill location should be defined in pred_block, so record
            // pred_block as requiring a spill operand.
            current->TopLevel()->GetListOfBlocksRequiringSpillOperands()->Add(
                pred_block->rpo_number().ToInt());
          }
        }
        int move_loc = ResolveControlFlow(block, cur_op, pred_block, pred_op);
        USE(move_loc);
        DCHECK_IMPLIES(
            result.cur_cover_->TopLevel()->IsSpilledOnlyInDeferredBlocks() &&
                !(pred_op.IsAnyRegister() && cur_op.IsAnyRegister()),
            code()->GetInstructionBlock(move_loc)->IsDeferred());
      }
      iterator.Advance();
    }
  }

  // At this stage we have collected blocks needing a spill operand; commit
  // the spills for deferred blocks.
  const size_t live_ranges_size = data()->live_ranges().size();
  for (TopLevelLiveRange* top : data()->live_ranges()) {
    CHECK_EQ(live_ranges_size, data()->live_ranges().size());
    if (top == nullptr || top->IsEmpty() ||
        !top->IsSpilledOnlyInDeferredBlocks())
      continue;
    CommitSpillsInDeferredBlocks(top, finder.ArrayFor(top->vreg()), local_zone);
  }
}

int LiveRangeConnector::ResolveControlFlow(const InstructionBlock* block,
                                           const InstructionOperand& cur_op,
                                           const InstructionBlock* pred,
                                           const InstructionOperand& pred_op) {
  DCHECK(!pred_op.Equals(cur_op));
  int gap_index;
  Instruction::GapPosition position;
  if (block->PredecessorCount() == 1) {
    gap_index = block->first_instruction_index();
    position = Instruction::START;
  } else {
    DCHECK_EQ(1, pred->SuccessorCount());
    DCHECK(!code()
                ->InstructionAt(pred->last_instruction_index())
                ->HasReferenceMap());
    gap_index = pred->last_instruction_index();
    position = Instruction::END;
  }
  data()->AddGapMove(gap_index, position, pred_op, cur_op);
  return gap_index;
}

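// ConnectRanges stitches the pieces of a split live range back together:
// whenever one child ends exactly where the next begins (with no unresolved
// block boundary in between), a gap move from the old location to the new
// one is inserted. Moves that must land in an END gap are buffered in
// delayed_insertion_map and committed per ParallelMove afterwards.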
void LiveRangeConnector::ConnectRanges(Zone* local_zone) {
  DelayedInsertionMap delayed_insertion_map(local_zone);
  const size_t live_ranges_size = data()->live_ranges().size();
  for (TopLevelLiveRange* top_range : data()->live_ranges()) {
    CHECK_EQ(live_ranges_size, data()->live_ranges().size());
    if (top_range == nullptr) continue;
    bool connect_spilled = top_range->IsSpilledOnlyInDeferredBlocks();
    LiveRange* first_range = top_range;
    for (LiveRange* second_range = first_range->next(); second_range != nullptr;
         first_range = second_range, second_range = second_range->next()) {
      LifetimePosition pos = second_range->Start();
      // Add a gap move if the two live ranges touch and there is no block
      // boundary.
      if (second_range->spilled()) continue;
      if (first_range->End() != pos) continue;
      if (data()->IsBlockBoundary(pos) &&
          !CanEagerlyResolveControlFlow(GetInstructionBlock(code(), pos))) {
        continue;
      }
      InstructionOperand prev_operand = first_range->GetAssignedOperand();
      InstructionOperand cur_operand = second_range->GetAssignedOperand();
      if (prev_operand.Equals(cur_operand)) continue;
      bool delay_insertion = false;
      Instruction::GapPosition gap_pos;
      int gap_index = pos.ToInstructionIndex();
      if (connect_spilled && !prev_operand.IsAnyRegister() &&
          cur_operand.IsAnyRegister()) {
        const InstructionBlock* block = code()->GetInstructionBlock(gap_index);
        DCHECK(block->IsDeferred());
        // Performing a reload in this block, so the spill operand must be
        // defined here.
        top_range->GetListOfBlocksRequiringSpillOperands()->Add(
            block->rpo_number().ToInt());
      }

      if (pos.IsGapPosition()) {
        gap_pos = pos.IsStart() ? Instruction::START : Instruction::END;
      } else {
        if (pos.IsStart()) {
          delay_insertion = true;
        } else {
          gap_index++;
        }
        gap_pos = delay_insertion ? Instruction::END : Instruction::START;
      }
      // Reloads or spills for ranges spilled in deferred blocks must happen
      // only in deferred blocks.
      DCHECK_IMPLIES(connect_spilled && !(prev_operand.IsAnyRegister() &&
                                          cur_operand.IsAnyRegister()),
                     code()->GetInstructionBlock(gap_index)->IsDeferred());

      ParallelMove* move =
          code()->InstructionAt(gap_index)->GetOrCreateParallelMove(
              gap_pos, code_zone());
      if (!delay_insertion) {
        move->AddMove(prev_operand, cur_operand);
      } else {
        delayed_insertion_map.insert(
            std::make_pair(std::make_pair(move, prev_operand), cur_operand));
      }
    }
  }
  if (delayed_insertion_map.empty()) return;
  // Insert all the moves which should occur after the stored move.
  ZoneVector<MoveOperands*> to_insert(local_zone);
  ZoneVector<MoveOperands*> to_eliminate(local_zone);
  to_insert.reserve(4);
  to_eliminate.reserve(4);
  ParallelMove* moves = delayed_insertion_map.begin()->first.first;
  for (auto it = delayed_insertion_map.begin();; ++it) {
    bool done = it == delayed_insertion_map.end();
    if (done || it->first.first != moves) {
      // Commit the buffered MoveOperands for the current ParallelMove.
      for (MoveOperands* move : to_eliminate) {
        move->Eliminate();
      }
      for (MoveOperands* move : to_insert) {
        moves->push_back(move);
      }
      if (done) break;
      // Reset state.
      to_eliminate.clear();
      to_insert.clear();
      moves = it->first.first;
    }
    // Gather all MoveOperands for a single ParallelMove.
    MoveOperands* move =
        new (code_zone()) MoveOperands(it->first.second, it->second);
    moves->PrepareInsertAfter(move, &to_eliminate);
    to_insert.push_back(move);
  }
}

void LiveRangeConnector::CommitSpillsInDeferredBlocks(
    TopLevelLiveRange* range, LiveRangeBoundArray* array, Zone* temp_zone) {
  DCHECK(range->IsSpilledOnlyInDeferredBlocks());
  DCHECK(!range->spilled());

  InstructionSequence* code = data()->code();
  InstructionOperand spill_operand = range->GetSpillRangeOperand();

  TRACE("Live Range %d will be spilled only in deferred blocks.\n",
        range->vreg());
  // If we have ranges that aren't spilled but require the operand on the
  // stack, make sure we insert the spill.
  for (const LiveRange* child = range; child != nullptr;
       child = child->next()) {
    for (const UsePosition* pos = child->first_pos(); pos != nullptr;
         pos = pos->next()) {
      if (pos->type() != UsePositionType::kRequiresSlot && !child->spilled())
        continue;
      range->AddBlockRequiringSpillOperand(
          code->GetInstructionBlock(pos->pos().ToInstructionIndex())
              ->rpo_number());
    }
  }

  ZoneQueue<int> worklist(temp_zone);

  for (BitVector::Iterator iterator(
           range->GetListOfBlocksRequiringSpillOperands());
       !iterator.Done(); iterator.Advance()) {
    worklist.push(iterator.Current());
  }

  ZoneSet<std::pair<RpoNumber, int>> done_moves(temp_zone);
  // Seek the deferred blocks that dominate locations requiring spill
  // operands, and spill there. We only need to spill at the start of such
  // blocks.
  BitVector done_blocks(
      range->GetListOfBlocksRequiringSpillOperands()->length(), temp_zone);
  while (!worklist.empty()) {
    int block_id = worklist.front();
    worklist.pop();
    if (done_blocks.Contains(block_id)) continue;
    done_blocks.Add(block_id);
    InstructionBlock* spill_block =
        code->InstructionBlockAt(RpoNumber::FromInt(block_id));

    for (const RpoNumber& pred : spill_block->predecessors()) {
      const InstructionBlock* pred_block = code->InstructionBlockAt(pred);

      if (pred_block->IsDeferred()) {
        worklist.push(pred_block->rpo_number().ToInt());
      } else {
        LifetimePosition pred_end =
            LifetimePosition::InstructionFromInstructionIndex(
                pred_block->last_instruction_index());

        LiveRangeBound* bound = array->Find(pred_end);

        InstructionOperand pred_op = bound->range_->GetAssignedOperand();

        RpoNumber spill_block_number = spill_block->rpo_number();
        if (done_moves.find(std::make_pair(
                spill_block_number, range->vreg())) == done_moves.end()) {
          data()->AddGapMove(spill_block->first_instruction_index(),
                             Instruction::GapPosition::START, pred_op,
                             spill_operand);
          done_moves.insert(std::make_pair(spill_block_number, range->vreg()));
          spill_block->mark_needs_frame();
        }
      }
    }
  }
}