V8 API Reference, 7.2.502.16 (for Deno 0.2.4)
register-allocator.h
// Copyright 2014 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef V8_COMPILER_BACKEND_REGISTER_ALLOCATOR_H_
#define V8_COMPILER_BACKEND_REGISTER_ALLOCATOR_H_

#include "src/base/bits.h"
#include "src/base/compiler-specific.h"
#include "src/compiler/backend/instruction.h"
#include "src/globals.h"
#include "src/ostreams.h"
#include "src/register-configuration.h"
#include "src/zone/zone-containers.h"

namespace v8 {
namespace internal {
namespace compiler {

enum RegisterKind { GENERAL_REGISTERS, FP_REGISTERS };

// This class represents a single point of an InstructionOperand's lifetime.
// For each instruction there are four lifetime positions:
//
//   [[START, END], [START, END]]
//
// where the first half position corresponds to
//
//   [GapPosition::START, GapPosition::END]
//
// and the second half position corresponds to
//
//   [Lifetime::USED_AT_START, Lifetime::USED_AT_END]
//
class LifetimePosition final {
 public:
  // Return the lifetime position that corresponds to the beginning of
  // the gap with the given index.
  static LifetimePosition GapFromInstructionIndex(int index) {
    return LifetimePosition(index * kStep);
  }
  // Return the lifetime position that corresponds to the beginning of
  // the instruction with the given index.
  static LifetimePosition InstructionFromInstructionIndex(int index) {
    return LifetimePosition(index * kStep + kHalfStep);
  }

  static bool ExistsGapPositionBetween(LifetimePosition pos1,
                                       LifetimePosition pos2) {
    if (pos1 > pos2) std::swap(pos1, pos2);
    LifetimePosition next(pos1.value_ + 1);
    if (next.IsGapPosition()) return next < pos2;
    return next.NextFullStart() < pos2;
  }

  // Returns a numeric representation of this lifetime position.
  int value() const { return value_; }

  // Returns the index of the instruction to which this lifetime position
  // corresponds.
  int ToInstructionIndex() const {
    DCHECK(IsValid());
    return value_ / kStep;
  }

  // Returns true if this lifetime position corresponds to a START value
  bool IsStart() const { return (value_ & (kHalfStep - 1)) == 0; }
  // Returns true if this lifetime position corresponds to an END value
  bool IsEnd() const { return (value_ & (kHalfStep - 1)) == 1; }
  // Returns true if this lifetime position corresponds to a gap START value
  bool IsFullStart() const { return (value_ & (kStep - 1)) == 0; }

  bool IsGapPosition() const { return (value_ & 0x2) == 0; }
  bool IsInstructionPosition() const { return !IsGapPosition(); }

  // Returns the lifetime position for the current START.
  LifetimePosition Start() const {
    DCHECK(IsValid());
    return LifetimePosition(value_ & ~(kHalfStep - 1));
  }

  // Returns the lifetime position for the current gap START.
  LifetimePosition FullStart() const {
    DCHECK(IsValid());
    return LifetimePosition(value_ & ~(kStep - 1));
  }

  // Returns the lifetime position for the current END.
  LifetimePosition End() const {
    DCHECK(IsValid());
    return LifetimePosition(Start().value_ + kHalfStep / 2);
  }

  // Returns the lifetime position for the beginning of the next START.
  LifetimePosition NextStart() const {
    DCHECK(IsValid());
    return LifetimePosition(Start().value_ + kHalfStep);
  }

  // Returns the lifetime position for the beginning of the next gap START.
  LifetimePosition NextFullStart() const {
    DCHECK(IsValid());
    return LifetimePosition(FullStart().value_ + kStep);
  }

  // Returns the lifetime position for the beginning of the previous START.
  LifetimePosition PrevStart() const {
    DCHECK(IsValid());
    DCHECK_LE(kHalfStep, value_);
    return LifetimePosition(Start().value_ - kHalfStep);
  }

  // Constructs the lifetime position which does not correspond to any
  // instruction.
  LifetimePosition() : value_(-1) {}

  // Returns true if this lifetime position corresponds to some
  // instruction.
  bool IsValid() const { return value_ != -1; }

  bool operator<(const LifetimePosition& that) const {
    return this->value_ < that.value_;
  }

  bool operator<=(const LifetimePosition& that) const {
    return this->value_ <= that.value_;
  }

  bool operator==(const LifetimePosition& that) const {
    return this->value_ == that.value_;
  }

  bool operator!=(const LifetimePosition& that) const {
    return this->value_ != that.value_;
  }

  bool operator>(const LifetimePosition& that) const {
    return this->value_ > that.value_;
  }

  bool operator>=(const LifetimePosition& that) const {
    return this->value_ >= that.value_;
  }

  void Print() const;

  static inline LifetimePosition Invalid() { return LifetimePosition(); }

  static inline LifetimePosition MaxPosition() {
    // We have to use this kind of getter instead of a static member due to
    // a crash bug in GDB.
    return LifetimePosition(kMaxInt);
  }

  static inline LifetimePosition FromInt(int value) {
    return LifetimePosition(value);
  }

 private:
  static const int kHalfStep = 2;
  static const int kStep = 2 * kHalfStep;

  static_assert(base::bits::IsPowerOfTwo(kHalfStep),
                "Code relies on kStep and kHalfStep being a power of two");

  explicit LifetimePosition(int value) : value_(value) {}

  int value_;
};
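
// An added illustration for this reference (not part of the original
// header): with kHalfStep == 2 and kStep == 4, each instruction i owns the
// four positions [4 * i, 4 * i + 3]: gap START, gap END, instruction START,
// instruction END.
inline void LifetimePositionEncodingExample() {
  LifetimePosition gap = LifetimePosition::GapFromInstructionIndex(3);
  DCHECK_EQ(12, gap.value());  // 4 * 3: the gap START of instruction 3.
  DCHECK(gap.IsGapPosition());
  DCHECK(gap.IsFullStart());
  LifetimePosition use =
      LifetimePosition::InstructionFromInstructionIndex(3);
  DCHECK_EQ(14, use.value());        // 4 * 3 + 2: the instruction START.
  DCHECK(use.IsInstructionPosition());
  DCHECK_EQ(15, use.End().value());  // The instruction END.
  DCHECK_EQ(3, use.ToInstructionIndex());
}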

std::ostream& operator<<(std::ostream& os, const LifetimePosition pos);

// Representation of the non-empty interval [start, end[.
class UseInterval final : public ZoneObject {
 public:
  UseInterval(LifetimePosition start, LifetimePosition end)
      : start_(start), end_(end), next_(nullptr) {
    DCHECK(start < end);
  }

  LifetimePosition start() const { return start_; }
  void set_start(LifetimePosition start) { start_ = start; }
  LifetimePosition end() const { return end_; }
  void set_end(LifetimePosition end) { end_ = end; }
  UseInterval* next() const { return next_; }
  void set_next(UseInterval* next) { next_ = next; }

  // Split this interval at the given position without affecting the
  // live range that owns it. The interval must contain the position.
  UseInterval* SplitAt(LifetimePosition pos, Zone* zone);

  // If this interval intersects with other, returns the smallest position
  // that belongs to both of them.
  LifetimePosition Intersect(const UseInterval* other) const {
    if (other->start() < start_) return other->Intersect(this);
    if (other->start() < end_) return other->start();
    return LifetimePosition::Invalid();
  }

  bool Contains(LifetimePosition point) const {
    return start_ <= point && point < end_;
  }

  // Returns the index of the first gap covered by this interval.
  int FirstGapIndex() const {
    int ret = start_.ToInstructionIndex();
    if (start_.IsInstructionPosition()) {
      ++ret;
    }
    return ret;
  }

  // Returns the index of the last gap covered by this interval.
  int LastGapIndex() const {
    int ret = end_.ToInstructionIndex();
    if (end_.IsGapPosition() && end_.IsStart()) {
      --ret;
    }
    return ret;
  }

 private:
  LifetimePosition start_;
  LifetimePosition end_;
  UseInterval* next_;

  DISALLOW_COPY_AND_ASSIGN(UseInterval);
};
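
// An added illustration (not part of the original header): intervals are
// half-open, so back-to-back intervals share an endpoint without overlapping,
// and Intersect() reports no intersection for them.
inline void UseIntervalSemanticsExample() {
  UseInterval a(LifetimePosition::FromInt(4), LifetimePosition::FromInt(8));
  UseInterval b(LifetimePosition::FromInt(8), LifetimePosition::FromInt(12));
  DCHECK(a.Contains(LifetimePosition::FromInt(4)));   // The start is included.
  DCHECK(!a.Contains(LifetimePosition::FromInt(8)));  // The end is excluded.
  DCHECK(!a.Intersect(&b).IsValid());  // Touching intervals do not intersect.
}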

enum class UsePositionType : uint8_t {
  kRegisterOrSlot,
  kRegisterOrSlotOrConstant,
  kRequiresRegister,
  kRequiresSlot
};

enum class UsePositionHintType : uint8_t {
  kNone,
  kOperand,
  kUsePos,
  kPhi,
  kUnresolved
};
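
// An added illustration (not part of the original header; the helper is
// hypothetical): kRequiresRegister is the only use type that a spill slot
// cannot satisfy, while kRequiresSlot conversely forces the value into
// memory at that use.
inline bool CanBeSatisfiedBySpillSlot(UsePositionType type) {
  return type != UsePositionType::kRequiresRegister;
}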

static const int32_t kUnassignedRegister =
    RegisterConfiguration::kMaxGeneralRegisters;

static_assert(kUnassignedRegister <= RegisterConfiguration::kMaxFPRegisters,
              "kUnassignedRegister too small");

// Representation of a use position.
class V8_EXPORT_PRIVATE UsePosition final
    : public NON_EXPORTED_BASE(ZoneObject) {
 public:
  UsePosition(LifetimePosition pos, InstructionOperand* operand, void* hint,
              UsePositionHintType hint_type);

  InstructionOperand* operand() const { return operand_; }
  bool HasOperand() const { return operand_ != nullptr; }

  bool RegisterIsBeneficial() const {
    return RegisterBeneficialField::decode(flags_);
  }
  UsePositionType type() const { return TypeField::decode(flags_); }
  void set_type(UsePositionType type, bool register_beneficial);

  LifetimePosition pos() const { return pos_; }

  UsePosition* next() const { return next_; }
  void set_next(UsePosition* next) { next_ = next; }

  // For hinting only.
  void set_assigned_register(int register_code) {
    flags_ = AssignedRegisterField::update(flags_, register_code);
  }

  UsePositionHintType hint_type() const {
    return HintTypeField::decode(flags_);
  }
  bool HasHint() const;
  bool HintRegister(int* register_code) const;
  void SetHint(UsePosition* use_pos);
  void ResolveHint(UsePosition* use_pos);
  bool IsResolved() const {
    return hint_type() != UsePositionHintType::kUnresolved;
  }
  static UsePositionHintType HintTypeForOperand(const InstructionOperand& op);

 private:
  typedef BitField<UsePositionType, 0, 2> TypeField;
  typedef BitField<UsePositionHintType, 2, 3> HintTypeField;
  typedef BitField<bool, 5, 1> RegisterBeneficialField;
  typedef BitField<int32_t, 6, 6> AssignedRegisterField;

  InstructionOperand* const operand_;
  void* hint_;
  UsePosition* next_;
  LifetimePosition const pos_;
  uint32_t flags_;

  DISALLOW_COPY_AND_ASSIGN(UsePosition);
};
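
// An added sketch (not part of the original header; the helper is
// hypothetical): a use position's hint, once resolved, may name a preferred
// register, which the allocator consults before picking any free register.
inline int HintedRegisterOrDefault(const UsePosition* use, int fallback) {
  int hinted_register;
  // HintRegister() only writes through the pointer when the hint resolves
  // to a concrete register.
  return use->HintRegister(&hinted_register) ? hinted_register : fallback;
}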

class SpillRange;
class RegisterAllocationData;
class TopLevelLiveRange;

// Representation of SSA values' live ranges as a collection of (continuous)
// intervals over the instruction ordering.
class V8_EXPORT_PRIVATE LiveRange : public NON_EXPORTED_BASE(ZoneObject) {
 public:
  UseInterval* first_interval() const { return first_interval_; }
  UsePosition* first_pos() const { return first_pos_; }
  TopLevelLiveRange* TopLevel() { return top_level_; }
  const TopLevelLiveRange* TopLevel() const { return top_level_; }

  bool IsTopLevel() const;

  LiveRange* next() const { return next_; }

  int relative_id() const { return relative_id_; }

  bool IsEmpty() const { return first_interval() == nullptr; }

  InstructionOperand GetAssignedOperand() const;

  MachineRepresentation representation() const {
    return RepresentationField::decode(bits_);
  }

  int assigned_register() const { return AssignedRegisterField::decode(bits_); }
  bool HasRegisterAssigned() const {
    return assigned_register() != kUnassignedRegister;
  }
  void set_assigned_register(int reg);
  void UnsetAssignedRegister();

  bool spilled() const { return SpilledField::decode(bits_); }
  void Spill();

  RegisterKind kind() const;

  // Returns the use position in this live range that follows both start
  // and the last processed use position.
  UsePosition* NextUsePosition(LifetimePosition start) const;

  // Returns the use position for which a register is required in this live
  // range and which follows both start and the last processed use position.
  UsePosition* NextRegisterPosition(LifetimePosition start) const;

  // Returns the first use position requiring a stack slot, or nullptr.
  UsePosition* NextSlotPosition(LifetimePosition start) const;

  // Returns the use position for which a register is beneficial in this live
  // range and which follows both start and the last processed use position.
  UsePosition* NextUsePositionRegisterIsBeneficial(
      LifetimePosition start) const;

  // Returns the lifetime position for which a register is beneficial in this
  // live range and which follows both start and the last processed use
  // position.
  LifetimePosition NextLifetimePositionRegisterIsBeneficial(
      const LifetimePosition& start) const;

  // Returns the use position for which a register is beneficial in this live
  // range and which precedes start.
  UsePosition* PreviousUsePositionRegisterIsBeneficial(
      LifetimePosition start) const;

  // Can this live range be spilled at this position?
  bool CanBeSpilled(LifetimePosition pos) const;

  // Splitting primitive used by both splitting and splintering members.
  // Performs the split, but does not link the resulting ranges.
  // The given position must follow the start of the range.
  // All uses following the given position will be moved from this
  // live range to the result live range.
  // The current range will terminate at position, while result will start
  // from position.
  enum HintConnectionOption : bool {
    DoNotConnectHints = false,
    ConnectHints = true
  };
  UsePosition* DetachAt(LifetimePosition position, LiveRange* result,
                        Zone* zone, HintConnectionOption connect_hints);

  // Detaches at position, and then links the resulting ranges. Returns the
  // child, which starts at position.
  LiveRange* SplitAt(LifetimePosition position, Zone* zone);

  // Returns nullptr when no register is hinted, otherwise sets register_index.
  UsePosition* FirstHintPosition(int* register_index) const;
  UsePosition* FirstHintPosition() const {
    int register_index;
    return FirstHintPosition(&register_index);
  }

  UsePosition* current_hint_position() const {
    DCHECK(current_hint_position_ == FirstHintPosition());
    return current_hint_position_;
  }

  LifetimePosition Start() const {
    DCHECK(!IsEmpty());
    return first_interval()->start();
  }

  LifetimePosition End() const {
    DCHECK(!IsEmpty());
    return last_interval_->end();
  }

  bool ShouldBeAllocatedBefore(const LiveRange* other) const;
  bool CanCover(LifetimePosition position) const;
  bool Covers(LifetimePosition position) const;
  LifetimePosition FirstIntersection(LiveRange* other) const;

  void VerifyChildStructure() const {
    VerifyIntervals();
    VerifyPositions();
  }

  void ConvertUsesToOperand(const InstructionOperand& op,
                            const InstructionOperand& spill_op);
  void SetUseHints(int register_index);
  void UnsetUseHints() { SetUseHints(kUnassignedRegister); }

  void Print(const RegisterConfiguration* config, bool with_children) const;
  void Print(bool with_children) const;

 private:
  friend class TopLevelLiveRange;
  explicit LiveRange(int relative_id, MachineRepresentation rep,
                     TopLevelLiveRange* top_level);

  void UpdateParentForAllChildren(TopLevelLiveRange* new_top_level);

  void set_spilled(bool value) { bits_ = SpilledField::update(bits_, value); }

  UseInterval* FirstSearchIntervalForPosition(LifetimePosition position) const;
  void AdvanceLastProcessedMarker(UseInterval* to_start_of,
                                  LifetimePosition but_not_past) const;

  void VerifyPositions() const;
  void VerifyIntervals() const;

  typedef BitField<bool, 0, 1> SpilledField;
  typedef BitField<int32_t, 6, 6> AssignedRegisterField;
  typedef BitField<MachineRepresentation, 13, 8> RepresentationField;

  // Unique among children and splinters of the same virtual register.
  int relative_id_;
  uint32_t bits_;
  UseInterval* last_interval_;
  UseInterval* first_interval_;
  UsePosition* first_pos_;
  TopLevelLiveRange* top_level_;
  LiveRange* next_;
  // This is used as a cache, it doesn't affect correctness.
  mutable UseInterval* current_interval_;
  // This is used as a cache, it doesn't affect correctness.
  mutable UsePosition* last_processed_use_;
  // This is used as a cache, it's invalid outside of BuildLiveRanges.
  mutable UsePosition* current_hint_position_;
  // Cache the last position splintering stopped at.
  mutable UsePosition* splitting_pointer_;

  DISALLOW_COPY_AND_ASSIGN(LiveRange);
};
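
// An added sketch (not part of the original header; the helper is
// hypothetical): the SplitAt() contract is that the returned child starts at
// the split position and is linked directly after the current range.
inline void LiveRangeSplitExample(LiveRange* range, Zone* zone) {
  LifetimePosition pos = LifetimePosition::GapFromInstructionIndex(10);
  // SplitAt() expects a position strictly inside the (non-empty) range.
  if (range->Start() < pos && pos < range->End()) {
    LiveRange* child = range->SplitAt(pos, zone);
    DCHECK_EQ(child, range->next());  // The child is chained after the parent.
    DCHECK(child->Start() == pos);    // The child covers [pos, End()[.
  }
}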

class V8_EXPORT_PRIVATE TopLevelLiveRange final : public LiveRange {
 public:
  explicit TopLevelLiveRange(int vreg, MachineRepresentation rep);
  int spill_start_index() const { return spill_start_index_; }

  bool IsFixed() const { return vreg_ < 0; }

  bool is_phi() const { return IsPhiField::decode(bits_); }
  void set_is_phi(bool value) { bits_ = IsPhiField::update(bits_, value); }

  bool is_non_loop_phi() const { return IsNonLoopPhiField::decode(bits_); }
  void set_is_non_loop_phi(bool value) {
    bits_ = IsNonLoopPhiField::update(bits_, value);
  }

  bool has_slot_use() const { return HasSlotUseField::decode(bits_); }
  void set_has_slot_use(bool value) {
    bits_ = HasSlotUseField::update(bits_, value);
  }

  // Add a new interval or a new use position to this live range.
  void EnsureInterval(LifetimePosition start, LifetimePosition end, Zone* zone);
  void AddUseInterval(LifetimePosition start, LifetimePosition end, Zone* zone);
  void AddUsePosition(UsePosition* pos);

  // Shorten the most recently added interval by setting a new start.
  void ShortenTo(LifetimePosition start);

  // Detaches between start and end, and attributes the resulting range to
  // result.
  // The current range is pointed to as "splintered_from". No parent/child
  // relationship is established between this and result.
  void Splinter(LifetimePosition start, LifetimePosition end, Zone* zone);

  // Assuming other was splintered from this range, embeds other and its
  // children as part of the children sequence of this range.
  void Merge(TopLevelLiveRange* other, Zone* zone);

  // Spill range management.
  void SetSpillRange(SpillRange* spill_range);
  enum class SpillType { kNoSpillType, kSpillOperand, kSpillRange };
  void set_spill_type(SpillType value) {
    bits_ = SpillTypeField::update(bits_, value);
  }
  SpillType spill_type() const { return SpillTypeField::decode(bits_); }
  InstructionOperand* GetSpillOperand() const {
    DCHECK_EQ(SpillType::kSpillOperand, spill_type());
    return spill_operand_;
  }

  SpillRange* GetAllocatedSpillRange() const {
    DCHECK_NE(SpillType::kSpillOperand, spill_type());
    return spill_range_;
  }

  SpillRange* GetSpillRange() const {
    DCHECK_EQ(SpillType::kSpillRange, spill_type());
    return spill_range_;
  }
  bool HasNoSpillType() const {
    return spill_type() == SpillType::kNoSpillType;
  }
  bool HasSpillOperand() const {
    return spill_type() == SpillType::kSpillOperand;
  }
  bool HasSpillRange() const { return spill_type() == SpillType::kSpillRange; }

  AllocatedOperand GetSpillRangeOperand() const;

  void RecordSpillLocation(Zone* zone, int gap_index,
                           InstructionOperand* operand);
  void SetSpillOperand(InstructionOperand* operand);
  void SetSpillStartIndex(int start) {
    spill_start_index_ = Min(start, spill_start_index_);
  }

  void CommitSpillMoves(InstructionSequence* sequence,
                        const InstructionOperand& operand,
                        bool might_be_duplicated);

  // If all the children of this range are spilled in deferred blocks, and if
  // every non-spilled child with a use position requiring a slot is itself
  // contained in a deferred block, mark the range as
  // IsSpilledOnlyInDeferredBlocks, so that we avoid spilling at definition
  // and instead let the LiveRangeConnector perform the spills within the
  // deferred blocks. If so, we insert spills here for non-spilled ranges
  // with slot use positions.
  void TreatAsSpilledInDeferredBlock(Zone* zone, int total_block_count) {
    spill_start_index_ = -1;
    spilled_in_deferred_blocks_ = true;
    spill_move_insertion_locations_ = nullptr;
    list_of_blocks_requiring_spill_operands_ =
        new (zone) BitVector(total_block_count, zone);
  }

  void CommitSpillInDeferredBlocks(RegisterAllocationData* data,
                                   const InstructionOperand& spill_operand,
                                   BitVector* necessary_spill_points);

  TopLevelLiveRange* splintered_from() const { return splintered_from_; }
  bool IsSplinter() const { return splintered_from_ != nullptr; }
  bool MayRequireSpillRange() const {
    DCHECK(!IsSplinter());
    return !HasSpillOperand() && spill_range_ == nullptr;
  }
  void UpdateSpillRangePostMerge(TopLevelLiveRange* merged);
  int vreg() const { return vreg_; }

#if DEBUG
  int debug_virt_reg() const;
#endif

  void Verify() const;
  void VerifyChildrenInOrder() const;

  int GetNextChildId() {
    return IsSplinter() ? splintered_from()->GetNextChildId()
                        : ++last_child_id_;
  }

  int GetChildCount() const { return last_child_id_ + 1; }

  bool IsSpilledOnlyInDeferredBlocks() const {
    return spilled_in_deferred_blocks_;
  }

  struct SpillMoveInsertionList;

  SpillMoveInsertionList* GetSpillMoveInsertionLocations() const {
    DCHECK(!IsSpilledOnlyInDeferredBlocks());
    return spill_move_insertion_locations_;
  }
  TopLevelLiveRange* splinter() const { return splinter_; }
  void SetSplinter(TopLevelLiveRange* splinter) {
    DCHECK_NULL(splinter_);
    DCHECK_NOT_NULL(splinter);

    splinter_ = splinter;
    splinter->relative_id_ = GetNextChildId();
    splinter->set_spill_type(spill_type());
    splinter->SetSplinteredFrom(this);
  }

  void MarkHasPreassignedSlot() { has_preassigned_slot_ = true; }
  bool has_preassigned_slot() const { return has_preassigned_slot_; }

  void AddBlockRequiringSpillOperand(RpoNumber block_id) {
    DCHECK(IsSpilledOnlyInDeferredBlocks());
    GetListOfBlocksRequiringSpillOperands()->Add(block_id.ToInt());
  }

  BitVector* GetListOfBlocksRequiringSpillOperands() const {
    DCHECK(IsSpilledOnlyInDeferredBlocks());
    return list_of_blocks_requiring_spill_operands_;
  }

 private:
  void SetSplinteredFrom(TopLevelLiveRange* splinter_parent);

  typedef BitField<bool, 1, 1> HasSlotUseField;
  typedef BitField<bool, 2, 1> IsPhiField;
  typedef BitField<bool, 3, 1> IsNonLoopPhiField;
  typedef BitField<SpillType, 4, 2> SpillTypeField;

  int vreg_;
  int last_child_id_;
  TopLevelLiveRange* splintered_from_;
  union {
    // Correct value determined by spill_type()
    InstructionOperand* spill_operand_;
    SpillRange* spill_range_;
  };

  union {
    SpillMoveInsertionList* spill_move_insertion_locations_;
    BitVector* list_of_blocks_requiring_spill_operands_;
  };

  // TODO(mtrofin): generalize spilling after definition, currently specialized
  // just for spill in a single deferred block.
  bool spilled_in_deferred_blocks_;
  int spill_start_index_;
  UsePosition* last_pos_;
  TopLevelLiveRange* splinter_;
  bool has_preassigned_slot_;

  DISALLOW_COPY_AND_ASSIGN(TopLevelLiveRange);
};
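
// An added sketch (not part of the original header; the helper is
// hypothetical): the GetSpill*() accessors are DCHECK-guarded, so callers
// must branch on the spill type before touching the union members.
inline InstructionOperand* SpillOperandOrNull(TopLevelLiveRange* range) {
  if (range->HasSpillOperand()) return range->GetSpillOperand();
  // kNoSpillType or kSpillRange: no pinned spill operand exists; a slot may
  // still be assigned later through the range's SpillRange.
  return nullptr;
}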

struct PrintableLiveRange {
  const RegisterConfiguration* register_configuration_;
  const LiveRange* range_;
};

std::ostream& operator<<(std::ostream& os,
                         const PrintableLiveRange& printable_range);

class SpillRange final : public ZoneObject {
 public:
  static const int kUnassignedSlot = -1;
  SpillRange(TopLevelLiveRange* range, Zone* zone);

  UseInterval* interval() const { return use_interval_; }

  bool IsEmpty() const { return live_ranges_.empty(); }
  bool TryMerge(SpillRange* other);
  bool HasSlot() const { return assigned_slot_ != kUnassignedSlot; }

  void set_assigned_slot(int index) {
    DCHECK_EQ(kUnassignedSlot, assigned_slot_);
    assigned_slot_ = index;
  }
  int assigned_slot() {
    DCHECK_NE(kUnassignedSlot, assigned_slot_);
    return assigned_slot_;
  }
  const ZoneVector<TopLevelLiveRange*>& live_ranges() const {
    return live_ranges_;
  }
  ZoneVector<TopLevelLiveRange*>& live_ranges() { return live_ranges_; }
  // Spill slots can be 4, 8, or 16 bytes wide.
  int byte_width() const { return byte_width_; }
  void Print() const;

 private:
  LifetimePosition End() const { return end_position_; }
  bool IsIntersectingWith(SpillRange* other) const;
  // Merge intervals, making sure the use intervals are sorted.
  void MergeDisjointIntervals(UseInterval* other);

  ZoneVector<TopLevelLiveRange*> live_ranges_;
  UseInterval* use_interval_;
  LifetimePosition end_position_;
  int assigned_slot_;
  int byte_width_;

  DISALLOW_COPY_AND_ASSIGN(SpillRange);
};
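
// An added sketch (not part of the original header; the helper is
// hypothetical): two spill ranges can share one stack slot only if their use
// intervals are disjoint and their widths are compatible, which is what
// TryMerge() checks before folding other's ranges into this one.
inline bool TrySharedSpillSlot(SpillRange* a, SpillRange* b) {
  return a != b && a->TryMerge(b);
}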

class RegisterAllocationData final : public ZoneObject {
 public:
  class PhiMapValue : public ZoneObject {
   public:
    PhiMapValue(PhiInstruction* phi, const InstructionBlock* block, Zone* zone);

    const PhiInstruction* phi() const { return phi_; }
    const InstructionBlock* block() const { return block_; }

    // For hinting.
    int assigned_register() const { return assigned_register_; }
    void set_assigned_register(int register_code) {
      DCHECK_EQ(assigned_register_, kUnassignedRegister);
      assigned_register_ = register_code;
    }
    void UnsetAssignedRegister() { assigned_register_ = kUnassignedRegister; }

    void AddOperand(InstructionOperand* operand);
    void CommitAssignment(const InstructionOperand& operand);

   private:
    PhiInstruction* const phi_;
    const InstructionBlock* const block_;
    ZoneVector<InstructionOperand*> incoming_operands_;
    int assigned_register_;
  };
  typedef ZoneMap<int, PhiMapValue*> PhiMap;

  struct DelayedReference {
    ReferenceMap* map;
    InstructionOperand* operand;
  };
  typedef ZoneVector<DelayedReference> DelayedReferences;
  typedef ZoneVector<std::pair<TopLevelLiveRange*, int>>
      RangesWithPreassignedSlots;

  RegisterAllocationData(const RegisterConfiguration* config,
                         Zone* allocation_zone, Frame* frame,
                         InstructionSequence* code,
                         const char* debug_name = nullptr);

  const ZoneVector<TopLevelLiveRange*>& live_ranges() const {
    return live_ranges_;
  }
  ZoneVector<TopLevelLiveRange*>& live_ranges() { return live_ranges_; }
  const ZoneVector<TopLevelLiveRange*>& fixed_live_ranges() const {
    return fixed_live_ranges_;
  }
  ZoneVector<TopLevelLiveRange*>& fixed_live_ranges() {
    return fixed_live_ranges_;
  }
  ZoneVector<TopLevelLiveRange*>& fixed_float_live_ranges() {
    return fixed_float_live_ranges_;
  }
  const ZoneVector<TopLevelLiveRange*>& fixed_float_live_ranges() const {
    return fixed_float_live_ranges_;
  }
  ZoneVector<TopLevelLiveRange*>& fixed_double_live_ranges() {
    return fixed_double_live_ranges_;
  }
  const ZoneVector<TopLevelLiveRange*>& fixed_double_live_ranges() const {
    return fixed_double_live_ranges_;
  }
  ZoneVector<TopLevelLiveRange*>& fixed_simd128_live_ranges() {
    return fixed_simd128_live_ranges_;
  }
  const ZoneVector<TopLevelLiveRange*>& fixed_simd128_live_ranges() const {
    return fixed_simd128_live_ranges_;
  }
  ZoneVector<BitVector*>& live_in_sets() { return live_in_sets_; }
  ZoneVector<BitVector*>& live_out_sets() { return live_out_sets_; }
  ZoneVector<SpillRange*>& spill_ranges() { return spill_ranges_; }
  DelayedReferences& delayed_references() { return delayed_references_; }
  InstructionSequence* code() const { return code_; }
  // This zone is for data structures only needed during register allocation
  // phases.
  Zone* allocation_zone() const { return allocation_zone_; }
  // This zone is for InstructionOperands and moves that live beyond register
  // allocation.
  Zone* code_zone() const { return code()->zone(); }
  Frame* frame() const { return frame_; }
  const char* debug_name() const { return debug_name_; }
  const RegisterConfiguration* config() const { return config_; }

  MachineRepresentation RepresentationFor(int virtual_register);

  TopLevelLiveRange* GetOrCreateLiveRangeFor(int index);
  // Creates a new live range.
  TopLevelLiveRange* NewLiveRange(int index, MachineRepresentation rep);
  TopLevelLiveRange* NextLiveRange(MachineRepresentation rep);

  SpillRange* AssignSpillRangeToLiveRange(TopLevelLiveRange* range);
  SpillRange* CreateSpillRangeForLiveRange(TopLevelLiveRange* range);

  MoveOperands* AddGapMove(int index, Instruction::GapPosition position,
                           const InstructionOperand& from,
                           const InstructionOperand& to);

  bool IsReference(TopLevelLiveRange* top_range) const {
    return code()->IsReference(top_range->vreg());
  }

  bool ExistsUseWithoutDefinition();
  bool RangesDefinedInDeferredStayInDeferred();

  void MarkAllocated(MachineRepresentation rep, int index);

  PhiMapValue* InitializePhiMap(const InstructionBlock* block,
                                PhiInstruction* phi);
  PhiMapValue* GetPhiMapValueFor(TopLevelLiveRange* top_range);
  PhiMapValue* GetPhiMapValueFor(int virtual_register);
  bool IsBlockBoundary(LifetimePosition pos) const;

  RangesWithPreassignedSlots& preassigned_slot_ranges() {
    return preassigned_slot_ranges_;
  }

 private:
  int GetNextLiveRangeId();

  Zone* const allocation_zone_;
  Frame* const frame_;
  InstructionSequence* const code_;
  const char* const debug_name_;
  const RegisterConfiguration* const config_;
  PhiMap phi_map_;
  ZoneVector<BitVector*> live_in_sets_;
  ZoneVector<BitVector*> live_out_sets_;
  ZoneVector<TopLevelLiveRange*> live_ranges_;
  ZoneVector<TopLevelLiveRange*> fixed_live_ranges_;
  ZoneVector<TopLevelLiveRange*> fixed_float_live_ranges_;
  ZoneVector<TopLevelLiveRange*> fixed_double_live_ranges_;
  ZoneVector<TopLevelLiveRange*> fixed_simd128_live_ranges_;
  ZoneVector<SpillRange*> spill_ranges_;
  DelayedReferences delayed_references_;
  BitVector* assigned_registers_;
  BitVector* assigned_double_registers_;
  int virtual_register_count_;
  RangesWithPreassignedSlots preassigned_slot_ranges_;

  DISALLOW_COPY_AND_ASSIGN(RegisterAllocationData);
};
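
// An added sketch (not part of the original header; the helper is
// hypothetical): RegisterAllocationData is the hub shared by the phase
// classes below, e.g. for mapping a virtual register to its live range and
// asking whether it holds a tagged, GC-visible value.
inline bool IsTaggedRange(RegisterAllocationData* data, int vreg) {
  TopLevelLiveRange* range = data->GetOrCreateLiveRangeFor(vreg);
  return data->IsReference(range);
}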

class ConstraintBuilder final : public ZoneObject {
 public:
  explicit ConstraintBuilder(RegisterAllocationData* data);

  // Phase 1: insert moves to account for fixed register operands.
  void MeetRegisterConstraints();

  // Phase 2: deconstruct SSA by inserting moves in successors and the headers
  // of blocks containing phis.
  void ResolvePhis();

 private:
  RegisterAllocationData* data() const { return data_; }
  InstructionSequence* code() const { return data()->code(); }
  Zone* allocation_zone() const { return data()->allocation_zone(); }

  InstructionOperand* AllocateFixed(UnallocatedOperand* operand, int pos,
                                    bool is_tagged);
  void MeetRegisterConstraints(const InstructionBlock* block);
  void MeetConstraintsBefore(int index);
  void MeetConstraintsAfter(int index);
  void MeetRegisterConstraintsForLastInstructionInBlock(
      const InstructionBlock* block);
  void ResolvePhis(const InstructionBlock* block);

  RegisterAllocationData* const data_;

  DISALLOW_COPY_AND_ASSIGN(ConstraintBuilder);
};

class LiveRangeBuilder final : public ZoneObject {
 public:
  explicit LiveRangeBuilder(RegisterAllocationData* data, Zone* local_zone);

  // Phase 3: compute liveness of all virtual registers.
  void BuildLiveRanges();
  static BitVector* ComputeLiveOut(const InstructionBlock* block,
                                   RegisterAllocationData* data);

 private:
  RegisterAllocationData* data() const { return data_; }
  InstructionSequence* code() const { return data()->code(); }
  Zone* allocation_zone() const { return data()->allocation_zone(); }
  Zone* code_zone() const { return code()->zone(); }
  const RegisterConfiguration* config() const { return data()->config(); }
  ZoneVector<BitVector*>& live_in_sets() const {
    return data()->live_in_sets();
  }

  // Verification.
  void Verify() const;
  bool IntervalStartsAtBlockBoundary(const UseInterval* interval) const;
  bool IntervalPredecessorsCoveredByRange(
      const UseInterval* interval, const TopLevelLiveRange* range) const;
  bool NextIntervalStartsInDifferentBlocks(const UseInterval* interval) const;

  // Liveness analysis support.
  void AddInitialIntervals(const InstructionBlock* block, BitVector* live_out);
  void ProcessInstructions(const InstructionBlock* block, BitVector* live);
  void ProcessPhis(const InstructionBlock* block, BitVector* live);
  void ProcessLoopHeader(const InstructionBlock* block, BitVector* live);

  static int FixedLiveRangeID(int index) { return -index - 1; }
  int FixedFPLiveRangeID(int index, MachineRepresentation rep);
  TopLevelLiveRange* FixedLiveRangeFor(int index);
  TopLevelLiveRange* FixedFPLiveRangeFor(int index, MachineRepresentation rep);

  void MapPhiHint(InstructionOperand* operand, UsePosition* use_pos);
  void ResolvePhiHint(InstructionOperand* operand, UsePosition* use_pos);

  UsePosition* NewUsePosition(LifetimePosition pos, InstructionOperand* operand,
                              void* hint, UsePositionHintType hint_type);
  UsePosition* NewUsePosition(LifetimePosition pos) {
    return NewUsePosition(pos, nullptr, nullptr, UsePositionHintType::kNone);
  }
  TopLevelLiveRange* LiveRangeFor(InstructionOperand* operand);
  // Helper methods for building intervals.
  UsePosition* Define(LifetimePosition position, InstructionOperand* operand,
                      void* hint, UsePositionHintType hint_type);
  void Define(LifetimePosition position, InstructionOperand* operand) {
    Define(position, operand, nullptr, UsePositionHintType::kNone);
  }
  UsePosition* Use(LifetimePosition block_start, LifetimePosition position,
                   InstructionOperand* operand, void* hint,
                   UsePositionHintType hint_type);
  void Use(LifetimePosition block_start, LifetimePosition position,
           InstructionOperand* operand) {
    Use(block_start, position, operand, nullptr, UsePositionHintType::kNone);
  }

  RegisterAllocationData* const data_;
  ZoneMap<InstructionOperand*, UsePosition*> phi_hints_;

  DISALLOW_COPY_AND_ASSIGN(LiveRangeBuilder);
};

class RegisterAllocator : public ZoneObject {
 public:
  RegisterAllocator(RegisterAllocationData* data, RegisterKind kind);

 protected:
  RegisterAllocationData* data() const { return data_; }
  InstructionSequence* code() const { return data()->code(); }
  RegisterKind mode() const { return mode_; }
  int num_registers() const { return num_registers_; }
  int num_allocatable_registers() const { return num_allocatable_registers_; }
  const int* allocatable_register_codes() const {
    return allocatable_register_codes_;
  }
  // Returns true iff we must check float register aliasing.
  bool check_fp_aliasing() const { return check_fp_aliasing_; }

  // TODO(mtrofin): explain why splitting in gap START is always OK.
  LifetimePosition GetSplitPositionForInstruction(const LiveRange* range,
                                                  int instruction_index);

  Zone* allocation_zone() const { return data()->allocation_zone(); }

  // Find the optimal split for ranges defined by a memory operand, e.g.
  // constants or function parameters passed on the stack.
  void SplitAndSpillRangesDefinedByMemoryOperand();

  // Split the given range at the given position.
  // If the range starts at or after the given position then the
  // original range is returned.
  // Otherwise returns the live range that starts at pos and contains
  // all uses from the original range that follow pos. Uses at pos will
  // still be owned by the original range after splitting.
  LiveRange* SplitRangeAt(LiveRange* range, LifetimePosition pos);

  bool CanProcessRange(LiveRange* range) const {
    return range != nullptr && !range->IsEmpty() && range->kind() == mode();
  }

  // Split the given range in a position from the interval [start, end].
  LiveRange* SplitBetween(LiveRange* range, LifetimePosition start,
                          LifetimePosition end);

  // Find a lifetime position in the interval [start, end] which
  // is optimal for splitting: it is either the header of the outermost
  // loop covered by this interval or the latest possible position.
  LifetimePosition FindOptimalSplitPos(LifetimePosition start,
                                       LifetimePosition end);

  void Spill(LiveRange* range);

  // If we are trying to spill a range inside a loop, try to
  // hoist the spill position out to the point just before the loop.
  LifetimePosition FindOptimalSpillingPos(LiveRange* range,
                                          LifetimePosition pos);

  const ZoneVector<TopLevelLiveRange*>& GetFixedRegisters() const;
  const char* RegisterName(int allocation_index) const;

 private:
  RegisterAllocationData* const data_;
  const RegisterKind mode_;
  const int num_registers_;
  int num_allocatable_registers_;
  const int* allocatable_register_codes_;
  bool check_fp_aliasing_;

 private:
  bool no_combining_;

  DISALLOW_COPY_AND_ASSIGN(RegisterAllocator);
};

class LinearScanAllocator final : public RegisterAllocator {
 public:
  LinearScanAllocator(RegisterAllocationData* data, RegisterKind kind,
                      Zone* local_zone);

  // Phase 4: compute register assignments.
  void AllocateRegisters();

 private:
  struct LiveRangeOrdering {
    bool operator()(const LiveRange* a, const LiveRange* b) const {
      return a->ShouldBeAllocatedBefore(b);
    }
  };
  using LiveRangeQueue = ZoneMultiset<LiveRange*, LiveRangeOrdering>;
  LiveRangeQueue& unhandled_live_ranges() { return unhandled_live_ranges_; }
  ZoneVector<LiveRange*>& active_live_ranges() { return active_live_ranges_; }
  ZoneVector<LiveRange*>& inactive_live_ranges() {
    return inactive_live_ranges_;
  }

  void SetLiveRangeAssignedRegister(LiveRange* range, int reg);

  // Helper methods for updating the live range lists.
  void AddToActive(LiveRange* range);
  void AddToInactive(LiveRange* range);
  void AddToUnhandled(LiveRange* range);
  ZoneVector<LiveRange*>::iterator ActiveToHandled(
      ZoneVector<LiveRange*>::iterator it);
  ZoneVector<LiveRange*>::iterator ActiveToInactive(
      ZoneVector<LiveRange*>::iterator it);
  ZoneVector<LiveRange*>::iterator InactiveToHandled(
      ZoneVector<LiveRange*>::iterator it);
  ZoneVector<LiveRange*>::iterator InactiveToActive(
      ZoneVector<LiveRange*>::iterator it);

  // Helper methods for allocating registers.
  bool TryReuseSpillForPhi(TopLevelLiveRange* range);
  bool TryAllocateFreeReg(LiveRange* range,
                          const Vector<LifetimePosition>& free_until_pos);
  bool TryAllocatePreferredReg(LiveRange* range,
                               const Vector<LifetimePosition>& free_until_pos);
  void GetFPRegisterSet(MachineRepresentation rep, int* num_regs,
                        int* num_codes, const int** codes) const;
  void FindFreeRegistersForRange(LiveRange* range,
                                 Vector<LifetimePosition> free_until_pos);
  void ProcessCurrentRange(LiveRange* current);
  void AllocateBlockedReg(LiveRange* range);
  bool TrySplitAndSpillSplinter(LiveRange* range);

  // Spill the given live range after position pos.
  void SpillAfter(LiveRange* range, LifetimePosition pos);

  // Spill the given live range after position [start] and up to position
  // [end].
  void SpillBetween(LiveRange* range, LifetimePosition start,
                    LifetimePosition end);

  // Spill the given live range after position [start] and up to position
  // [end]. The range is guaranteed to be spilled at least until position
  // [until].
  void SpillBetweenUntil(LiveRange* range, LifetimePosition start,
                         LifetimePosition until, LifetimePosition end);

  void SplitAndSpillIntersecting(LiveRange* range);
  void PrintRangeOverview(std::ostream& os);

  LiveRangeQueue unhandled_live_ranges_;
  ZoneVector<LiveRange*> active_live_ranges_;
  ZoneVector<LiveRange*> inactive_live_ranges_;

#ifdef DEBUG
  LifetimePosition allocation_finger_;
#endif

  DISALLOW_COPY_AND_ASSIGN(LinearScanAllocator);
};

class SpillSlotLocator final : public ZoneObject {
 public:
  explicit SpillSlotLocator(RegisterAllocationData* data);

  void LocateSpillSlots();

 private:
  RegisterAllocationData* data() const { return data_; }

  RegisterAllocationData* const data_;

  DISALLOW_COPY_AND_ASSIGN(SpillSlotLocator);
};

class OperandAssigner final : public ZoneObject {
 public:
  explicit OperandAssigner(RegisterAllocationData* data);

  // Phase 5: assign spill slots.
  void AssignSpillSlots();

  // Phase 6: commit assignment.
  void CommitAssignment();

 private:
  RegisterAllocationData* data() const { return data_; }

  RegisterAllocationData* const data_;

  DISALLOW_COPY_AND_ASSIGN(OperandAssigner);
};

class ReferenceMapPopulator final : public ZoneObject {
 public:
  explicit ReferenceMapPopulator(RegisterAllocationData* data);

  // Phase 7: compute values for pointer maps.
  void PopulateReferenceMaps();

 private:
  RegisterAllocationData* data() const { return data_; }

  bool SafePointsAreInOrder() const;

  RegisterAllocationData* const data_;

  DISALLOW_COPY_AND_ASSIGN(ReferenceMapPopulator);
};

class LiveRangeBoundArray;
// Insert moves of the form
//
//   Operand(child_(k+1)) = Operand(child_k)
//
// where child_k and child_(k+1) are consecutive children of a range (so
// child_k->next() == child_(k+1)), and Operand(...) refers to the
// assigned operand, be it a register or a slot.
class LiveRangeConnector final : public ZoneObject {
 public:
  explicit LiveRangeConnector(RegisterAllocationData* data);

  // Phase 8: reconnect split ranges with moves, when the control flow
  // between the ranges is trivial (no branches).
  void ConnectRanges(Zone* local_zone);

  // Phase 9: insert moves to connect ranges across basic blocks, when the
  // control flow between them cannot be trivially resolved, such as joining
  // branches.
  void ResolveControlFlow(Zone* local_zone);

 private:
  RegisterAllocationData* data() const { return data_; }
  InstructionSequence* code() const { return data()->code(); }
  Zone* code_zone() const { return code()->zone(); }

  bool CanEagerlyResolveControlFlow(const InstructionBlock* block) const;

  int ResolveControlFlow(const InstructionBlock* block,
                         const InstructionOperand& cur_op,
                         const InstructionBlock* pred,
                         const InstructionOperand& pred_op);

  void CommitSpillsInDeferredBlocks(TopLevelLiveRange* range,
                                    LiveRangeBoundArray* array,
                                    Zone* temp_zone);

  RegisterAllocationData* const data_;

  DISALLOW_COPY_AND_ASSIGN(LiveRangeConnector);
};
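
// An added sketch (not part of the original header; the driver function is
// hypothetical): it wires the phase classes together in the order of the
// numbered "Phase N" comments above. The real TurboFan pipeline interleaves
// further steps (e.g. live range splintering and merging) between these
// calls.
inline void RunAllocationPhasesSketch(RegisterAllocationData* data,
                                      Zone* local_zone) {
  ConstraintBuilder constraints(data);
  constraints.MeetRegisterConstraints();  // Phase 1.
  constraints.ResolvePhis();              // Phase 2.
  LiveRangeBuilder live_ranges(data, local_zone);
  live_ranges.BuildLiveRanges();          // Phase 3.
  LinearScanAllocator general(data, GENERAL_REGISTERS, local_zone);
  general.AllocateRegisters();            // Phase 4, general registers.
  LinearScanAllocator fp(data, FP_REGISTERS, local_zone);
  fp.AllocateRegisters();                 // Phase 4, floating-point registers.
  SpillSlotLocator locator(data);
  locator.LocateSpillSlots();
  OperandAssigner operands(data);
  operands.AssignSpillSlots();            // Phase 5.
  operands.CommitAssignment();            // Phase 6.
  ReferenceMapPopulator references(data);
  references.PopulateReferenceMaps();     // Phase 7.
  LiveRangeConnector connector(data);
  connector.ConnectRanges(local_zone);    // Phase 8.
  connector.ResolveControlFlow(local_zone);  // Phase 9.
}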

}  // namespace compiler
}  // namespace internal
}  // namespace v8

#endif  // V8_COMPILER_BACKEND_REGISTER_ALLOCATOR_H_