// NOTE(review): extraction artifact — the stray integers embedded in these
// lines are original source line numbers, not code. This span holds the
// include guard, the #include block, and the RegisterKind enum
// (general-purpose vs. floating-point register files).
5 #ifndef V8_COMPILER_BACKEND_REGISTER_ALLOCATOR_H_ 6 #define V8_COMPILER_BACKEND_REGISTER_ALLOCATOR_H_ 8 #include "src/base/bits.h" 9 #include "src/base/compiler-specific.h" 10 #include "src/compiler/backend/instruction.h" 11 #include "src/globals.h" 12 #include "src/ostreams.h" 13 #include "src/register-configuration.h" 14 #include "src/zone/zone-containers.h" 20 enum RegisterKind { GENERAL_REGISTERS, FP_REGISTERS };
// Fragment of a helper that tests whether a gap position exists between two
// lifetime positions; the enclosing signature is outside this view.
// The pair is normalized so pos1 <= pos2 before checking.
50 if (pos1 > pos2) std::swap(pos1, pos2);
// If `next` is already a gap position it only has to fall before pos2;
// otherwise the next full start is the first candidate gap.
52 if (next.IsGapPosition())
return next < pos2;
53 return next.NextFullStart() < pos2;
57 int value()
const {
return value_; }
// Maps the encoded position back to its instruction index: each instruction
// owns kStep consecutive encoded values.
// NOTE(review): the method's closing brace (and possibly a validity check)
// lies outside the visible fragment — left byte-identical.
61 int ToInstructionIndex()
const {
63 return value_ / kStep;
67 bool IsStart()
const {
return (value_ & (kHalfStep - 1)) == 0; }
69 bool IsEnd()
const {
return (value_ & (kHalfStep - 1)) == 1; }
71 bool IsFullStart()
const {
return (value_ & (kStep - 1)) == 0; }
73 bool IsGapPosition()
const {
return (value_ & 0x2) == 0; }
74 bool IsInstructionPosition()
const {
return !IsGapPosition(); }
// Lone assertion fragment: guards that the position does not underflow below
// the first half step. The enclosing method is outside this view.
109 DCHECK_LE(kHalfStep, value_);
119 bool IsValid()
const {
return value_ != -1; }
// Bodies of the six relational operators on LifetimePosition; each compares
// the raw encoded value_. The operator signatures are outside this view.
122 return this->value_ < that.value_;
126 return this->value_ <= that.value_;
130 return this->value_ == that.value_;
134 return this->value_ != that.value_;
138 return this->value_ > that.value_;
142 return this->value_ >= that.value_;
// Encoding granularity: kStep encoded values per instruction, split into a
// gap half and an instruction half of kHalfStep each.
160 static const int kHalfStep = 2;
161 static const int kStep = 2 * kHalfStep;
// Only kHalfStep is asserted; since kStep == 2 * kHalfStep, kStep being a
// power of two follows.
163 static_assert(base::bits::IsPowerOfTwo(kHalfStep),
164 "Code relies on kStep and kHalfStep being a power of two");
// UseInterval fragments: a [start, end) slice of a live range, chained via
// next_. Constructor tail — the parameter list is outside this view.
177 : start_(start), end_(end), next_(
nullptr) {
// Intersect: first overlapping position of two intervals, or an invalid
// position when disjoint; delegates so the interval with the smaller start
// does the comparison.
195 if (other->start() < start_)
return other->Intersect(
this);
196 if (other->start() < end_)
return other->start();
197 return LifetimePosition::Invalid();
// Contains: half-open membership test, start_ <= point < end_.
201 return start_ <= point && point < end_;
// First/LastGapIndex: instruction-index bounds of gap positions covered by
// this interval. NOTE(review): both bodies are truncated in this view.
205 int FirstGapIndex()
const {
206 int ret = start_.ToInstructionIndex();
207 if (start_.IsInstructionPosition()) {
214 int LastGapIndex()
const {
215 int ret = end_.ToInstructionIndex();
216 if (end_.IsGapPosition() && end_.IsStart()) {
// Use-position classification enums (only partially visible here).
230 enum class UsePositionType : uint8_t {
232 kRegisterOrSlotOrConstant,
237 enum class UsePositionHintType : uint8_t {
// Sentinel register code meaning "no register assigned yet"; must also be
// representable for the FP register file, hence the static_assert.
245 static const int32_t kUnassignedRegister =
246 RegisterConfiguration::kMaxGeneralRegisters;
248 static_assert(kUnassignedRegister <= RegisterConfiguration::kMaxFPRegisters,
249 "kUnassignedRegister too small");
// UsePosition fragments: one use of a virtual register at a specific
// lifetime position. flags_ packs the use type, hint type, the
// register-beneficial bit and the assigned register via BitField-style
// decode/update helpers.
253 :
public NON_EXPORTED_BASE(ZoneObject) {
256 UsePositionHintType hint_type);
// A use may carry no operand (operand_ == nullptr).
259 bool HasOperand()
const {
return operand_ !=
nullptr; }
261 bool RegisterIsBeneficial()
const {
262 return RegisterBeneficialField::decode(flags_);
264 UsePositionType
type()
const {
return TypeField::decode(flags_); }
265 void set_type(UsePositionType
type,
bool register_beneficial);
273 void set_assigned_register(
int register_code) {
274 flags_ = AssignedRegisterField::update(flags_, register_code);
277 UsePositionHintType hint_type()
const {
278 return HintTypeField::decode(flags_);
280 bool HasHint()
const;
// HintRegister: writes the hinted register code through register_code when
// a hint can be resolved (declaration only here).
281 bool HintRegister(
int* register_code)
const;
// A hint is "resolved" once its type is no longer kUnresolved.
284 bool IsResolved()
const {
285 return hint_type() != UsePositionHintType::kUnresolved;
// LiveRange fragments: a continuous chunk of a virtual register's lifetime,
// represented as a linked list of UseIntervals plus a list of UsePositions.
// NOTE(review): next_ presumably chains child ranges created by splitting —
// confirm against the full class.
310 class V8_EXPORT_PRIVATE
LiveRange :
public NON_EXPORTED_BASE(ZoneObject) {
312 UseInterval* first_interval()
const {
return first_interval_; }
313 UsePosition* first_pos()
const {
return first_pos_; }
317 bool IsTopLevel()
const;
319 LiveRange* next()
const {
return next_; }
// relative_id distinguishes children derived from the same top-level range.
321 int relative_id()
const {
return relative_id_; }
// A range with no intervals is empty.
323 bool IsEmpty()
const {
return first_interval() ==
nullptr; }
// bits_ packs representation, assigned register and spill flags.
327 MachineRepresentation representation()
const {
328 return RepresentationField::decode(bits_);
331 int assigned_register()
const {
return AssignedRegisterField::decode(bits_); }
332 bool HasRegisterAssigned()
const {
333 return assigned_register() != kUnassignedRegister;
335 void set_assigned_register(
int reg);
336 void UnsetAssignedRegister();
338 bool spilled()
const {
return SpilledField::decode(bits_); }
341 RegisterKind kind()
const;
366 UsePosition* PreviousUsePositionRegisterIsBeneficial(
// Whether use-position hints should be linked across a split point.
379 enum HintConnectionOption :
bool {
380 DoNotConnectHints =
false,
384 Zone* zone, HintConnectionOption connect_hints);
391 UsePosition* FirstHintPosition(
int* register_index)
const;
394 return FirstHintPosition(®ister_index);
// current_hint_position_ caches FirstHintPosition(); the DCHECK validates the
// cache before returning it.
398 DCHECK(current_hint_position_ == FirstHintPosition());
399 return current_hint_position_;
// Start()/End() bodies: range bounds come from the first and last intervals.
404 return first_interval()->start();
409 return last_interval_->end();
412 bool ShouldBeAllocatedBefore(
const LiveRange* other)
const;
417 void VerifyChildStructure()
const {
424 void SetUseHints(
int register_index);
// Clearing hints is just re-hinting with the unassigned sentinel.
425 void UnsetUseHints() { SetUseHints(kUnassignedRegister); }
428 void Print(
bool with_children)
const;
// Private constructor and implementation helpers of LiveRange.
432 explicit LiveRange(
int relative_id, MachineRepresentation rep,
437 void set_spilled(
bool value) { bits_ = SpilledField::update(bits_, value); }
440 void AdvanceLastProcessedMarker(
UseInterval* to_start_of,
443 void VerifyPositions()
const;
444 void VerifyIntervals()
const;
// TopLevelLiveRange fragments: the range covering a whole virtual register,
// with spill state, splinter links and deferred-block spill bookkeeping.
473 int spill_start_index()
const {
return spill_start_index_; }
// Fixed (preassigned machine-register) ranges use negative vreg ids.
475 bool IsFixed()
const {
return vreg_ < 0; }
477 bool is_phi()
const {
return IsPhiField::decode(bits_); }
478 void set_is_phi(
bool value) { bits_ = IsPhiField::update(bits_, value); }
480 bool is_non_loop_phi()
const {
return IsNonLoopPhiField::decode(bits_); }
481 void set_is_non_loop_phi(
bool value) {
482 bits_ = IsNonLoopPhiField::update(bits_, value);
485 bool has_slot_use()
const {
return HasSlotUseField::decode(bits_); }
486 void set_has_slot_use(
bool value) {
487 bits_ = HasSlotUseField::update(bits_, value);
// Spill state is one of: none, a concrete spill operand, or a shared
// SpillRange; the DCHECKs below enforce the active state on each accessor.
510 enum class SpillType { kNoSpillType, kSpillOperand, kSpillRange };
511 void set_spill_type(SpillType value) {
512 bits_ = SpillTypeField::update(bits_, value);
514 SpillType spill_type()
const {
return SpillTypeField::decode(bits_); }
516 DCHECK_EQ(SpillType::kSpillOperand, spill_type());
517 return spill_operand_;
521 DCHECK_NE(SpillType::kSpillOperand, spill_type());
526 DCHECK_EQ(SpillType::kSpillRange, spill_type());
529 bool HasNoSpillType()
const {
530 return spill_type() == SpillType::kNoSpillType;
532 bool HasSpillOperand()
const {
533 return spill_type() == SpillType::kSpillOperand;
535 bool HasSpillRange()
const {
return spill_type() == SpillType::kSpillRange; }
539 void RecordSpillLocation(
Zone* zone,
int gap_index,
// The spill start index only ever moves earlier (Min).
542 void SetSpillStartIndex(
int start) {
543 spill_start_index_ = Min(start, spill_start_index_);
548 bool might_be_duplicated);
// Deferred-block spilling: reset spill bookkeeping and allocate a bit set
// sized to the block count to record blocks needing a spill operand.
557 void TreatAsSpilledInDeferredBlock(
Zone* zone,
int total_block_count) {
558 spill_start_index_ = -1;
559 spilled_in_deferred_blocks_ =
true;
560 spill_move_insertion_locations_ =
nullptr;
561 list_of_blocks_requiring_spill_operands_ =
562 new (zone)
BitVector(total_block_count, zone);
// Splinters: a sub-range carved out of this one; back-linked via
// splintered_from_.
570 bool IsSplinter()
const {
return splintered_from_ !=
nullptr; }
571 bool MayRequireSpillRange()
const {
572 DCHECK(!IsSplinter());
573 return !HasSpillOperand() && spill_range_ ==
nullptr;
576 int vreg()
const {
return vreg_; }
579 int debug_virt_reg()
const;
583 void VerifyChildrenInOrder()
const;
// Child ids are issued by the splinter root so numbering stays consistent
// across the whole family.
585 int GetNextChildId() {
586 return IsSplinter() ? splintered_from()->GetNextChildId()
590 int GetChildCount()
const {
return last_child_id_ + 1; }
592 bool IsSpilledOnlyInDeferredBlocks()
const {
593 return spilled_in_deferred_blocks_;
599 DCHECK(!IsSpilledOnlyInDeferredBlocks());
600 return spill_move_insertion_locations_;
// SetSplinter body: attach exactly once (DCHECKs), then propagate child id
// and spill type to the new splinter.
604 DCHECK_NULL(splinter_);
605 DCHECK_NOT_NULL(splinter);
607 splinter_ = splinter;
608 splinter->relative_id_ = GetNextChildId();
609 splinter->set_spill_type(spill_type());
610 splinter->SetSplinteredFrom(
this);
613 void MarkHasPreassignedSlot() { has_preassigned_slot_ =
true; }
614 bool has_preassigned_slot()
const {
return has_preassigned_slot_; }
616 void AddBlockRequiringSpillOperand(
RpoNumber block_id) {
617 DCHECK(IsSpilledOnlyInDeferredBlocks());
618 GetListOfBlocksRequiringSpillOperands()->Add(block_id.ToInt());
621 BitVector* GetListOfBlocksRequiringSpillOperands()
const {
622 DCHECK(IsSpilledOnlyInDeferredBlocks());
623 return list_of_blocks_requiring_spill_operands_;
// Data members (partial view only).
645 BitVector* list_of_blocks_requiring_spill_operands_;
650 bool spilled_in_deferred_blocks_;
651 int spill_start_index_;
654 bool has_preassigned_slot_;
// SpillRange fragments: merged spill intervals of several live ranges that
// can share one stack slot (kUnassignedSlot until one is chosen).
664 std::ostream& operator<<(std::ostream& os,
669 static const int kUnassignedSlot = -1;
672 UseInterval* interval()
const {
return use_interval_; }
674 bool IsEmpty()
const {
return live_ranges_.empty(); }
676 bool HasSlot()
const {
return assigned_slot_ != kUnassignedSlot; }
// The slot is assigned exactly once; the paired DCHECKs enforce the
// unassigned -> assigned transition and reads-after-assignment.
678 void set_assigned_slot(
int index) {
679 DCHECK_EQ(kUnassignedSlot, assigned_slot_);
680 assigned_slot_ = index;
682 int assigned_slot() {
683 DCHECK_NE(kUnassignedSlot, assigned_slot_);
684 return assigned_slot_;
691 int byte_width()
const {
return byte_width_; }
696 bool IsIntersectingWith(
SpillRange* other)
const;
// Assigned-register bookkeeping with a write-once DCHECK; presumably part of
// the phi-map helper class — confirm against the full file.
719 int assigned_register()
const {
return assigned_register_; }
720 void set_assigned_register(
int register_code) {
721 DCHECK_EQ(assigned_register_, kUnassignedRegister);
722 assigned_register_ = register_code;
724 void UnsetAssignedRegister() { assigned_register_ = kUnassignedRegister; }
733 int assigned_register_;
// RegisterAllocationData fragments: shared state for the allocation phases —
// fixed live-range vectors per machine representation, liveness bit sets,
// spill ranges, delayed references and phi bookkeeping.
748 const char* debug_name =
nullptr);
755 return fixed_live_ranges_;
757 ZoneVector<TopLevelLiveRange*>& fixed_live_ranges() {
758 return fixed_live_ranges_;
// Mutable and const accessor pairs for each fixed-range vector.
760 ZoneVector<TopLevelLiveRange*>& fixed_float_live_ranges() {
761 return fixed_float_live_ranges_;
763 const ZoneVector<TopLevelLiveRange*>& fixed_float_live_ranges()
const {
764 return fixed_float_live_ranges_;
766 ZoneVector<TopLevelLiveRange*>& fixed_double_live_ranges() {
767 return fixed_double_live_ranges_;
769 const ZoneVector<TopLevelLiveRange*>& fixed_double_live_ranges()
const {
770 return fixed_double_live_ranges_;
772 ZoneVector<TopLevelLiveRange*>& fixed_simd128_live_ranges() {
773 return fixed_simd128_live_ranges_;
775 const ZoneVector<TopLevelLiveRange*>& fixed_simd128_live_ranges()
const {
776 return fixed_simd128_live_ranges_;
778 ZoneVector<BitVector*>& live_in_sets() {
return live_in_sets_; }
779 ZoneVector<BitVector*>& live_out_sets() {
return live_out_sets_; }
780 ZoneVector<SpillRange*>& spill_ranges() {
return spill_ranges_; }
781 DelayedReferences& delayed_references() {
return delayed_references_; }
782 InstructionSequence* code()
const {
return code_; }
785 Zone* allocation_zone()
const {
return allocation_zone_; }
// The code zone is owned by the instruction sequence, not by this object.
788 Zone* code_zone()
const {
return code()->zone(); }
789 Frame* frame()
const {
return frame_; }
790 const char* debug_name()
const {
return debug_name_; }
791 const RegisterConfiguration* config()
const {
return config_; }
793 MachineRepresentation RepresentationFor(
int virtual_register);
795 TopLevelLiveRange* GetOrCreateLiveRangeFor(
int index);
797 TopLevelLiveRange* NewLiveRange(
int index, MachineRepresentation rep);
798 TopLevelLiveRange* NextLiveRange(MachineRepresentation rep);
800 SpillRange* AssignSpillRangeToLiveRange(TopLevelLiveRange* range);
801 SpillRange* CreateSpillRangeForLiveRange(TopLevelLiveRange* range);
803 MoveOperands* AddGapMove(
int index, Instruction::GapPosition position,
804 const InstructionOperand& from,
805 const InstructionOperand& to);
// Reference-ness is delegated to the instruction sequence via the vreg.
807 bool IsReference(TopLevelLiveRange* top_range)
const {
808 return code()->IsReference(top_range->vreg());
811 bool ExistsUseWithoutDefinition();
812 bool RangesDefinedInDeferredStayInDeferred();
814 void MarkAllocated(MachineRepresentation rep,
int index);
816 PhiMapValue* InitializePhiMap(
const InstructionBlock* block,
817 PhiInstruction* phi);
818 PhiMapValue* GetPhiMapValueFor(TopLevelLiveRange* top_range);
819 PhiMapValue* GetPhiMapValueFor(
int virtual_register);
820 bool IsBlockBoundary(LifetimePosition pos)
const;
822 RangesWithPreassignedSlots& preassigned_slot_ranges() {
823 return preassigned_slot_ranges_;
827 int GetNextLiveRangeId();
// Immutable configuration captured at construction (pointer-to-const-object
// members declared `* const`).
829 Zone*
const allocation_zone_;
831 InstructionSequence*
const code_;
832 const char*
const debug_name_;
833 const RegisterConfiguration*
const config_;
835 ZoneVector<BitVector*> live_in_sets_;
836 ZoneVector<BitVector*> live_out_sets_;
837 ZoneVector<TopLevelLiveRange*> live_ranges_;
838 ZoneVector<TopLevelLiveRange*> fixed_live_ranges_;
839 ZoneVector<TopLevelLiveRange*> fixed_float_live_ranges_;
840 ZoneVector<TopLevelLiveRange*> fixed_double_live_ranges_;
841 ZoneVector<TopLevelLiveRange*> fixed_simd128_live_ranges_;
842 ZoneVector<SpillRange*> spill_ranges_;
843 DelayedReferences delayed_references_;
844 BitVector* assigned_registers_;
845 BitVector* assigned_double_registers_;
846 int virtual_register_count_;
847 RangesWithPreassignedSlots preassigned_slot_ranges_;
849 DISALLOW_COPY_AND_ASSIGN(RegisterAllocationData);
// Constraint-building fragments: meeting fixed register constraints per
// instruction and per block (MeetConstraintsBefore/After).
857 void MeetRegisterConstraints();
866 Zone* allocation_zone()
const {
return data()->allocation_zone(); }
871 void MeetConstraintsBefore(
int index);
872 void MeetConstraintsAfter(
int index);
873 void MeetRegisterConstraintsForLastInstructionInBlock(
// Live-range-building fragments: computing liveness sets and constructing
// use intervals / use positions (Define/Use helpers below).
887 void BuildLiveRanges();
894 Zone* allocation_zone()
const {
return data()->allocation_zone(); }
895 Zone* code_zone()
const {
return code()->zone(); }
898 return data()->live_in_sets();
903 bool IntervalStartsAtBlockBoundary(
const UseInterval* interval)
const;
904 bool IntervalPredecessorsCoveredByRange(
const UseInterval* interval,
906 bool NextIntervalStartsInDifferentBlocks(
const UseInterval* interval)
const;
// Fixed ranges get negative ids derived from the register index so they
// never collide with virtual-register ids (>= 0).
914 static int FixedLiveRangeID(
int index) {
return -index - 1; }
915 int FixedFPLiveRangeID(
int index, MachineRepresentation rep);
// Convenience overloads forwarding with null operand/hint and kNone hint type.
923 void* hint, UsePositionHintType hint_type);
925 return NewUsePosition(pos,
nullptr,
nullptr, UsePositionHintType::kNone);
930 void* hint, UsePositionHintType hint_type);
932 Define(position, operand,
nullptr, UsePositionHintType::kNone);
936 UsePositionHintType hint_type);
939 Use(block_start, position, operand,
nullptr, UsePositionHintType::kNone);
// Register-allocator base fragments: per-register-class configuration
// (mode, register counts, allocatable codes, FP aliasing check).
955 RegisterKind mode()
const {
return mode_; }
956 int num_registers()
const {
return num_registers_; }
957 int num_allocatable_registers()
const {
return num_allocatable_registers_; }
958 const int* allocatable_register_codes()
const {
959 return allocatable_register_codes_;
962 bool check_fp_aliasing()
const {
return check_fp_aliasing_; }
966 int instruction_index);
968 Zone* allocation_zone()
const {
return data()->allocation_zone(); }
972 void SplitAndSpillRangesDefinedByMemoryOperand();
// A range is processable by this allocator only when non-null, non-empty and
// of the matching register kind.
982 bool CanProcessRange(
LiveRange* range)
const {
983 return range !=
nullptr && !range->IsEmpty() && range->kind() == mode();
1004 const char* RegisterName(
int allocation_index)
const;
1008 const RegisterKind mode_;
1009 const int num_registers_;
1010 int num_allocatable_registers_;
1011 const int* allocatable_register_codes_;
1012 bool check_fp_aliasing_;
// Linear-scan fragments: the main allocation loop; the queue orders ranges
// via LiveRange::ShouldBeAllocatedBefore.
1026 void AllocateRegisters();
1029 struct LiveRangeOrdering {
1031 return a->ShouldBeAllocatedBefore(b);
1035 LiveRangeQueue& unhandled_live_ranges() {
return unhandled_live_ranges_; }
1038 return inactive_live_ranges_;
1041 void SetLiveRangeAssignedRegister(
LiveRange* range,
int reg);
1058 bool TryAllocateFreeReg(
LiveRange* range,
1060 bool TryAllocatePreferredReg(
LiveRange* range,
1062 void GetFPRegisterSet(MachineRepresentation rep,
int* num_regs,
1063 int* num_codes,
const int** codes)
const;
1064 void FindFreeRegistersForRange(
LiveRange* range,
1066 void ProcessCurrentRange(
LiveRange* current);
1067 void AllocateBlockedReg(
LiveRange* range);
1068 bool TrySplitAndSpillSplinter(
LiveRange* range);
1082 void SplitAndSpillIntersecting(
LiveRange* range);
1083 void PrintRangeOverview(std::ostream& os);
// Spill-slot assignment, operand commitment, reference-map population and
// range-connection fragments, then the closing include guard.
1100 void LocateSpillSlots();
1115 void AssignSpillSlots();
1118 void CommitAssignment();
1133 void PopulateReferenceMaps();
1138 bool SafePointsAreInOrder()
const;
1159 void ConnectRanges(
Zone* local_zone);
1164 void ResolveControlFlow(
Zone* local_zone);
1169 Zone* code_zone()
const {
return code()->zone(); }
1191 #endif // V8_COMPILER_BACKEND_REGISTER_ALLOCATOR_H_