V8 API Reference, 7.2.502.16 (for Deno 0.2.4)
instruction.h
1 // Copyright 2014 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
4 
5 #ifndef V8_COMPILER_BACKEND_INSTRUCTION_H_
6 #define V8_COMPILER_BACKEND_INSTRUCTION_H_
7 
8 #include <deque>
9 #include <iosfwd>
10 #include <map>
11 #include <set>
12 
13 #include "src/base/compiler-specific.h"
14 #include "src/compiler/backend/instruction-codes.h"
15 #include "src/compiler/common-operator.h"
16 #include "src/compiler/frame.h"
17 #include "src/compiler/opcodes.h"
18 #include "src/double.h"
19 #include "src/globals.h"
20 #include "src/macro-assembler.h"
21 #include "src/source-position.h"
22 #include "src/zone/zone-allocator.h"
23 
24 namespace v8 {
25 namespace internal {
26 
27 class RegisterConfiguration;
28 
29 namespace compiler {
30 
31 class Schedule;
32 class SourcePositionTable;
33 
34 class V8_EXPORT_PRIVATE InstructionOperand {
35  public:
36  static const int kInvalidVirtualRegister = -1;
37 
// Discriminator stored in the low 3 bits of value_ (see KindField below).
38  enum Kind {
39  INVALID,
40  UNALLOCATED,
41  CONSTANT,
42  IMMEDIATE,
43  // Location operand kinds.
44  EXPLICIT,
45  ALLOCATED,
46  FIRST_LOCATION_OPERAND_KIND = EXPLICIT
47  // Location operand kinds must be last.
48  };
49 
// NOTE(review): original line 50 is missing from this capture; later code
// (e.g. MoveOperands::SetPending) default-constructs InstructionOperand, so
// the dropped line is presumably the default constructor — confirm upstream.
51 
52  Kind kind() const { return KindField::decode(value_); }
53 
54 #define INSTRUCTION_OPERAND_PREDICATE(name, type) \
55  bool Is##name() const { return kind() == type; }
56  INSTRUCTION_OPERAND_PREDICATE(Invalid, INVALID)
57  // UnallocatedOperands are place-holder operands created before register
58  // allocation. They later are assigned registers and become AllocatedOperands.
59  INSTRUCTION_OPERAND_PREDICATE(Unallocated, UNALLOCATED)
60  // Constant operands participate in register allocation. They are allocated to
61  // registers but have a special "spilling" behavior. When a ConstantOperand
62  // value must be rematerialized, it is loaded from an immediate constant
63  // rather from an unspilled slot.
64  INSTRUCTION_OPERAND_PREDICATE(Constant, CONSTANT)
65  // ImmediateOperands do not participate in register allocation and are only
66  // embedded directly in instructions, e.g. small integers and on some
67  // platforms Objects.
68  INSTRUCTION_OPERAND_PREDICATE(Immediate, IMMEDIATE)
69  // ExplicitOperands do not participate in register allocation. They are
70  // created by the instruction selector for direct access to registers and
71  // stack slots, completely bypassing the register allocator. They are never
72  // associated with a virtual register
73  INSTRUCTION_OPERAND_PREDICATE(Explicit, EXPLICIT)
74  // AllocatedOperands are registers or stack slots that are assigned by the
75  // register allocator and are always associated with a virtual register.
76  INSTRUCTION_OPERAND_PREDICATE(Allocated, ALLOCATED)
77 #undef INSTRUCTION_OPERAND_PREDICATE
78 
// Finer-grained predicates (defined inline near the end of this header) that
// inspect the LocationOperand payload in addition to the kind tag.
79  inline bool IsAnyLocationOperand() const;
80  inline bool IsLocationOperand() const;
81  inline bool IsFPLocationOperand() const;
82  inline bool IsAnyRegister() const;
83  inline bool IsRegister() const;
84  inline bool IsFPRegister() const;
85  inline bool IsFloatRegister() const;
86  inline bool IsDoubleRegister() const;
87  inline bool IsSimd128Register() const;
88  inline bool IsAnyStackSlot() const;
89  inline bool IsStackSlot() const;
90  inline bool IsFPStackSlot() const;
91  inline bool IsFloatStackSlot() const;
92  inline bool IsSimd128StackSlot() const;
93  inline bool IsDoubleStackSlot() const;
94 
// Copy-constructs a subkind operand into Zone memory via placement new.
95  template <typename SubKindOperand>
96  static SubKindOperand* New(Zone* zone, const SubKindOperand& op) {
97  void* buffer = zone->New(sizeof(op));
98  return new (buffer) SubKindOperand(op);
99  }
100 
101  static void ReplaceWith(InstructionOperand* dest,
102  const InstructionOperand* src) {
103  *dest = *src;
104  }
105 
// Raw 64-bit comparison: two operands are equal only if every encoded field
// (kind, policy, index, representation, ...) matches exactly.
106  bool Equals(const InstructionOperand& that) const {
107  return this->value_ == that.value_;
108  }
109 
110  bool Compare(const InstructionOperand& that) const {
111  return this->value_ < that.value_;
112  }
113 
// Canonicalized comparisons fold away fields that do not distinguish
// locations (see GetCanonicalizedValue), e.g. aliasing FP representations.
114  bool EqualsCanonicalized(const InstructionOperand& that) const {
115  return this->GetCanonicalizedValue() == that.GetCanonicalizedValue();
116  }
117 
118  bool CompareCanonicalized(const InstructionOperand& that) const {
119  return this->GetCanonicalizedValue() < that.GetCanonicalizedValue();
120  }
121 
122  bool InterferesWith(const InstructionOperand& other) const;
123 
124  // APIs to aid debugging. For general-stream APIs, use operator<<.
125  void Print() const;
126 
127  protected:
128  explicit InstructionOperand(Kind kind) : value_(KindField::encode(kind)) {}
129 
130  inline uint64_t GetCanonicalizedValue() const;
131 
// Low 3 bits of value_ hold the Kind; subclasses pack their payloads above.
132  class KindField : public BitField64<Kind, 0, 3> {};
133 
// Single 64-bit payload shared by all operand subkinds.
134  uint64_t value_;
135 };
136 
138 
139 std::ostream& operator<<(std::ostream&, const InstructionOperand&);
140 
// Generates the three standard checked downcasts (mutable pointer, const
// pointer, and by-value) from InstructionOperand to a concrete subkind;
// each DCHECKs the dynamic kind tag before the static_cast.
141 #define INSTRUCTION_OPERAND_CASTS(OperandType, OperandKind) \
142  \
143  static OperandType* cast(InstructionOperand* op) { \
144  DCHECK_EQ(OperandKind, op->kind()); \
145  return static_cast<OperandType*>(op); \
146  } \
147  \
148  static const OperandType* cast(const InstructionOperand* op) { \
149  DCHECK_EQ(OperandKind, op->kind()); \
150  return static_cast<const OperandType*>(op); \
151  } \
152  \
153  static OperandType cast(const InstructionOperand& op) { \
154  DCHECK_EQ(OperandKind, op.kind()); \
155  return *static_cast<const OperandType*>(&op); \
156  }
157 
// NOTE(review): the class-declaration line (original line 158) is missing
// from this capture. The cast macro below shows this is the body of
// UnallocatedOperand (kind UNALLOCATED) — recover the header from upstream.
159  public:
160  enum BasicPolicy { FIXED_SLOT, EXTENDED_POLICY };
161 
162  enum ExtendedPolicy {
163  NONE,
164  REGISTER_OR_SLOT,
165  REGISTER_OR_SLOT_OR_CONSTANT,
166  FIXED_REGISTER,
167  FIXED_FP_REGISTER,
168  MUST_HAVE_REGISTER,
169  MUST_HAVE_SLOT,
170  SAME_AS_FIRST_INPUT
171  };
172 
173  // Lifetime of operand inside the instruction.
174  enum Lifetime {
175  // USED_AT_START operand is guaranteed to be live only at instruction start.
176  // The register allocator is free to assign the same register to some other
177  // operand used inside instruction (i.e. temporary or output).
178  USED_AT_START,
179 
180  // USED_AT_END operand is treated as live until the end of instruction.
181  // This means that register allocator will not reuse its register for any
182  // other operand inside instruction.
183  USED_AT_END
184  };
185 
// Extended policy with default USED_AT_END lifetime.
186  UnallocatedOperand(ExtendedPolicy policy, int virtual_register)
187  : UnallocatedOperand(virtual_register) {
188  value_ |= BasicPolicyField::encode(EXTENDED_POLICY);
189  value_ |= ExtendedPolicyField::encode(policy);
190  value_ |= LifetimeField::encode(USED_AT_END);
191  }
192 
// FIXED_SLOT: index is a signed value stored by raw shift (see the encoding
// comment below), hence the round-trip DCHECK.
193  UnallocatedOperand(BasicPolicy policy, int index, int virtual_register)
194  : UnallocatedOperand(virtual_register) {
195  DCHECK(policy == FIXED_SLOT);
196  value_ |= BasicPolicyField::encode(policy);
197  value_ |= static_cast<int64_t>(index) << FixedSlotIndexField::kShift;
198  DCHECK(this->fixed_slot_index() == index);
199  }
200 
// Fixed (FP) register: index names the required register code.
201  UnallocatedOperand(ExtendedPolicy policy, int index, int virtual_register)
202  : UnallocatedOperand(virtual_register) {
203  DCHECK(policy == FIXED_REGISTER || policy == FIXED_FP_REGISTER);
204  value_ |= BasicPolicyField::encode(EXTENDED_POLICY);
205  value_ |= ExtendedPolicyField::encode(policy);
206  value_ |= LifetimeField::encode(USED_AT_END);
207  value_ |= FixedRegisterField::encode(index);
208  }
209 
210  UnallocatedOperand(ExtendedPolicy policy, Lifetime lifetime,
211  int virtual_register)
212  : UnallocatedOperand(virtual_register) {
213  value_ |= BasicPolicyField::encode(EXTENDED_POLICY);
214  value_ |= ExtendedPolicyField::encode(policy);
215  value_ |= LifetimeField::encode(lifetime);
216  }
217 
// Fixed register with an associated secondary spill slot.
218  UnallocatedOperand(int reg_id, int slot_id, int virtual_register)
219  : UnallocatedOperand(FIXED_REGISTER, reg_id, virtual_register) {
220  value_ |= HasSecondaryStorageField::encode(true);
221  value_ |= SecondaryStorageField::encode(slot_id);
222  }
223 
// Copy of |other| re-targeted at a different virtual register.
224  UnallocatedOperand(const UnallocatedOperand& other, int virtual_register) {
225  DCHECK_NE(kInvalidVirtualRegister, virtual_register);
226  value_ = VirtualRegisterField::update(
227  other.value_, static_cast<uint32_t>(virtual_register));
228  }
229 
230  // Predicates for the operand policy.
231  bool HasRegisterOrSlotPolicy() const {
232  return basic_policy() == EXTENDED_POLICY &&
233  extended_policy() == REGISTER_OR_SLOT;
234  }
235  bool HasRegisterOrSlotOrConstantPolicy() const {
236  return basic_policy() == EXTENDED_POLICY &&
237  extended_policy() == REGISTER_OR_SLOT_OR_CONSTANT;
238  }
239  bool HasFixedPolicy() const {
240  return basic_policy() == FIXED_SLOT ||
241  extended_policy() == FIXED_REGISTER ||
242  extended_policy() == FIXED_FP_REGISTER;
243  }
244  bool HasRegisterPolicy() const {
245  return basic_policy() == EXTENDED_POLICY &&
246  extended_policy() == MUST_HAVE_REGISTER;
247  }
248  bool HasSlotPolicy() const {
249  return basic_policy() == EXTENDED_POLICY &&
250  extended_policy() == MUST_HAVE_SLOT;
251  }
252  bool HasSameAsInputPolicy() const {
253  return basic_policy() == EXTENDED_POLICY &&
254  extended_policy() == SAME_AS_FIRST_INPUT;
255  }
256  bool HasFixedSlotPolicy() const { return basic_policy() == FIXED_SLOT; }
257  bool HasFixedRegisterPolicy() const {
258  return basic_policy() == EXTENDED_POLICY &&
259  extended_policy() == FIXED_REGISTER;
260  }
261  bool HasFixedFPRegisterPolicy() const {
262  return basic_policy() == EXTENDED_POLICY &&
263  extended_policy() == FIXED_FP_REGISTER;
264  }
265  bool HasSecondaryStorage() const {
266  return basic_policy() == EXTENDED_POLICY &&
267  extended_policy() == FIXED_REGISTER &&
268  HasSecondaryStorageField::decode(value_);
269  }
270  int GetSecondaryStorage() const {
271  DCHECK(HasSecondaryStorage());
272  return SecondaryStorageField::decode(value_);
273  }
274 
275  // [basic_policy]: Distinguish between FIXED_SLOT and all other policies.
276  BasicPolicy basic_policy() const { return BasicPolicyField::decode(value_); }
277 
278  // [extended_policy]: Only for non-FIXED_SLOT. The finer-grained policy.
279  ExtendedPolicy extended_policy() const {
280  DCHECK(basic_policy() == EXTENDED_POLICY);
281  return ExtendedPolicyField::decode(value_);
282  }
283 
284  // [fixed_slot_index]: Only for FIXED_SLOT.
285  int fixed_slot_index() const {
286  DCHECK(HasFixedSlotPolicy());
// Arithmetic (sign-extending) shift recovers the signed slot index.
287  return static_cast<int>(static_cast<int64_t>(value_) >>
288  FixedSlotIndexField::kShift);
289  }
290 
291  // [fixed_register_index]: Only for FIXED_REGISTER or FIXED_FP_REGISTER.
292  int fixed_register_index() const {
293  DCHECK(HasFixedRegisterPolicy() || HasFixedFPRegisterPolicy());
294  return FixedRegisterField::decode(value_);
295  }
296 
297  // [virtual_register]: The virtual register ID for this operand.
298  int32_t virtual_register() const {
299  return static_cast<int32_t>(VirtualRegisterField::decode(value_));
300  }
301 
302  // [lifetime]: Only for non-FIXED_SLOT.
303  bool IsUsedAtStart() const {
304  DCHECK(basic_policy() == EXTENDED_POLICY);
305  return LifetimeField::decode(value_) == USED_AT_START;
306  }
307 
308  INSTRUCTION_OPERAND_CASTS(UnallocatedOperand, UNALLOCATED);
309 
310  // The encoding used for UnallocatedOperand operands depends on the policy
311  // that is
312  // stored within the operand. The FIXED_SLOT policy uses a compact encoding
313  // because it accommodates a larger pay-load.
314  //
315  // For FIXED_SLOT policy:
316  // +------------------------------------------------+
317  // | slot_index | 0 | virtual_register | 001 |
318  // +------------------------------------------------+
319  //
320  // For all other (extended) policies:
321  // +-----------------------------------------------------+
322  // | reg_index | L | PPP | 1 | virtual_register | 001 |
323  // +-----------------------------------------------------+
324  // L ... Lifetime
325  // P ... Policy
326  //
327  // The slot index is a signed value which requires us to decode it manually
328  // instead of using the BitField utility class.
329 
330  STATIC_ASSERT(KindField::kSize == 3);
331 
332  class VirtualRegisterField : public BitField64<uint32_t, 3, 32> {};
333 
334  // BitFields for all unallocated operands.
335  class BasicPolicyField : public BitField64<BasicPolicy, 35, 1> {};
336 
337  // BitFields specific to BasicPolicy::FIXED_SLOT.
338  class FixedSlotIndexField : public BitField64<int, 36, 28> {};
339 
340  // BitFields specific to BasicPolicy::EXTENDED_POLICY.
341  class ExtendedPolicyField : public BitField64<ExtendedPolicy, 36, 3> {};
342  class LifetimeField : public BitField64<Lifetime, 39, 1> {};
343  class HasSecondaryStorageField : public BitField64<bool, 40, 1> {};
344  class FixedRegisterField : public BitField64<int, 41, 6> {};
345  class SecondaryStorageField : public BitField64<int, 47, 3> {};
346 
347  private:
// Delegated-to base constructor: sets the kind tag and virtual register;
// the public constructors then OR in their policy-specific fields.
348  explicit UnallocatedOperand(int virtual_register)
349  : InstructionOperand(UNALLOCATED) {
350  value_ |=
351  VirtualRegisterField::encode(static_cast<uint32_t>(virtual_register));
352  }
353 };
354 
// NOTE(review): the class-declaration line (original line 355) is missing
// from this capture; the cast macro below shows this is ConstantOperand.
356  public:
// A constant is identified solely by the virtual register it materializes.
357  explicit ConstantOperand(int virtual_register)
358  : InstructionOperand(CONSTANT) {
359  value_ |=
360  VirtualRegisterField::encode(static_cast<uint32_t>(virtual_register));
361  }
362 
363  int32_t virtual_register() const {
364  return static_cast<int32_t>(VirtualRegisterField::decode(value_));
365  }
366 
// Zone-allocating factory (placement-new via InstructionOperand::New).
367  static ConstantOperand* New(Zone* zone, int virtual_register) {
368  return InstructionOperand::New(zone, ConstantOperand(virtual_register));
369  }
370 
371  INSTRUCTION_OPERAND_CASTS(ConstantOperand, CONSTANT);
372 
373  STATIC_ASSERT(KindField::kSize == 3);
374  class VirtualRegisterField : public BitField64<uint32_t, 3, 32> {};
375 };
376 
// NOTE(review): the class-declaration line (original line 377) is missing
// from this capture; the cast macro below shows this is ImmediateOperand.
378  public:
// INLINE values are stored directly in the operand; INDEXED values are an
// index into an external table of immediates.
379  enum ImmediateType { INLINE, INDEXED };
380 
381  explicit ImmediateOperand(ImmediateType type, int32_t value)
382  : InstructionOperand(IMMEDIATE) {
383  value_ |= TypeField::encode(type);
384  value_ |= static_cast<int64_t>(value) << ValueField::kShift;
385  }
386 
387  ImmediateType type() const { return TypeField::decode(value_); }
388 
389  int32_t inline_value() const {
390  DCHECK_EQ(INLINE, type());
// Arithmetic shift sign-extends the stored 32-bit value.
391  return static_cast<int64_t>(value_) >> ValueField::kShift;
392  }
393 
394  int32_t indexed_value() const {
395  DCHECK_EQ(INDEXED, type());
396  return static_cast<int64_t>(value_) >> ValueField::kShift;
397  }
398 
399  static ImmediateOperand* New(Zone* zone, ImmediateType type, int32_t value) {
400  return InstructionOperand::New(zone, ImmediateOperand(type, value));
401  }
402 
403  INSTRUCTION_OPERAND_CASTS(ImmediateOperand, IMMEDIATE);
404 
405  STATIC_ASSERT(KindField::kSize == 3);
406  class TypeField : public BitField64<ImmediateType, 3, 1> {};
407  class ValueField : public BitField64<int32_t, 32, 32> {};
408 };
409 
// NOTE(review): the class-declaration line (original line 410) is missing
// from this capture; the casts below show this is LocationOperand, the
// common base of ExplicitOperand and AllocatedOperand.
411  public:
412  enum LocationKind { REGISTER, STACK_SLOT };
413 
// operand_kind must be EXPLICIT or ALLOCATED (the two location kinds).
414  LocationOperand(InstructionOperand::Kind operand_kind,
415  LocationOperand::LocationKind location_kind,
416  MachineRepresentation rep, int index)
417  : InstructionOperand(operand_kind) {
418  DCHECK_IMPLIES(location_kind == REGISTER, index >= 0);
419  DCHECK(IsSupportedRepresentation(rep));
420  value_ |= LocationKindField::encode(location_kind);
421  value_ |= RepresentationField::encode(rep);
// Signed index stored by raw shift; decoded with an arithmetic shift below.
422  value_ |= static_cast<int64_t>(index) << IndexField::kShift;
423  }
424 
// Stack-slot index (may be negative, hence the sign-extending decode).
425  int index() const {
426  DCHECK(IsStackSlot() || IsFPStackSlot());
427  return static_cast<int64_t>(value_) >> IndexField::kShift;
428  }
429 
430  int register_code() const {
431  DCHECK(IsRegister() || IsFPRegister());
432  return static_cast<int64_t>(value_) >> IndexField::kShift;
433  }
434 
435  Register GetRegister() const {
436  DCHECK(IsRegister());
437  return Register::from_code(register_code());
438  }
439 
440  FloatRegister GetFloatRegister() const {
441  DCHECK(IsFloatRegister());
442  return FloatRegister::from_code(register_code());
443  }
444 
445  DoubleRegister GetDoubleRegister() const {
446  // On platforms where FloatRegister, DoubleRegister, and Simd128Register
447  // are all the same type, it's convenient to treat everything as a
448  // DoubleRegister, so be lax about type checking here.
449  DCHECK(IsFPRegister());
450  return DoubleRegister::from_code(register_code());
451  }
452 
453  Simd128Register GetSimd128Register() const {
454  DCHECK(IsSimd128Register());
455  return Simd128Register::from_code(register_code());
456  }
457 
458  LocationKind location_kind() const {
459  return LocationKindField::decode(value_);
460  }
461 
462  MachineRepresentation representation() const {
463  return RepresentationField::decode(value_);
464  }
465 
// Sub-word and kNone representations cannot be carried by a location.
466  static bool IsSupportedRepresentation(MachineRepresentation rep) {
467  switch (rep) {
468  case MachineRepresentation::kWord32:
469  case MachineRepresentation::kWord64:
470  case MachineRepresentation::kFloat32:
471  case MachineRepresentation::kFloat64:
472  case MachineRepresentation::kSimd128:
473  case MachineRepresentation::kTaggedSigned:
474  case MachineRepresentation::kTaggedPointer:
475  case MachineRepresentation::kTagged:
476  return true;
477  case MachineRepresentation::kBit:
478  case MachineRepresentation::kWord8:
479  case MachineRepresentation::kWord16:
480  case MachineRepresentation::kNone:
481  return false;
482  }
483  UNREACHABLE();
484  }
485 
486  // Return true if the locations can be moved to one another.
487  bool IsCompatible(LocationOperand* op);
488 
// Hand-written casts (instead of INSTRUCTION_OPERAND_CASTS) because a
// LocationOperand covers two kinds: EXPLICIT and ALLOCATED.
489  static LocationOperand* cast(InstructionOperand* op) {
490  DCHECK(op->IsAnyLocationOperand());
491  return static_cast<LocationOperand*>(op);
492  }
493 
494  static const LocationOperand* cast(const InstructionOperand* op) {
495  DCHECK(op->IsAnyLocationOperand());
496  return static_cast<const LocationOperand*>(op);
497  }
498 
499  static LocationOperand cast(const InstructionOperand& op) {
500  DCHECK(op.IsAnyLocationOperand());
501  return *static_cast<const LocationOperand*>(&op);
502  }
503 
504  STATIC_ASSERT(KindField::kSize == 3);
505  class LocationKindField : public BitField64<LocationKind, 3, 2> {};
506  class RepresentationField : public BitField64<MachineRepresentation, 5, 8> {};
507  class IndexField : public BitField64<int32_t, 35, 29> {};
508 };
509 
510 class V8_EXPORT_PRIVATE ExplicitOperand
511  : public NON_EXPORTED_BASE(LocationOperand) {
512  public:
513  ExplicitOperand(LocationKind kind, MachineRepresentation rep, int index);
514 
515  static ExplicitOperand* New(Zone* zone, LocationKind kind,
516  MachineRepresentation rep, int index) {
517  return InstructionOperand::New(zone, ExplicitOperand(kind, rep, index));
518  }
519 
520  INSTRUCTION_OPERAND_CASTS(ExplicitOperand, EXPLICIT);
521 };
522 
// NOTE(review): the class-declaration line (original line 523) is missing
// from this capture; the cast macro below shows this is AllocatedOperand
// (a LocationOperand assigned by the register allocator).
524  public:
525  AllocatedOperand(LocationKind kind, MachineRepresentation rep, int index)
526  : LocationOperand(ALLOCATED, kind, rep, index) {}
527 
// Zone-allocating factory, mirroring the other operand subkinds.
528  static AllocatedOperand* New(Zone* zone, LocationKind kind,
529  MachineRepresentation rep, int index) {
530  return InstructionOperand::New(zone, AllocatedOperand(kind, rep, index));
531  }
532 
533  INSTRUCTION_OPERAND_CASTS(AllocatedOperand, ALLOCATED);
534 };
535 
536 #undef INSTRUCTION_OPERAND_CASTS
537 
538 bool InstructionOperand::IsAnyLocationOperand() const {
539  return this->kind() >= FIRST_LOCATION_OPERAND_KIND;
540 }
541 
542 bool InstructionOperand::IsLocationOperand() const {
543  return IsAnyLocationOperand() &&
544  !IsFloatingPoint(LocationOperand::cast(this)->representation());
545 }
546 
547 bool InstructionOperand::IsFPLocationOperand() const {
548  return IsAnyLocationOperand() &&
549  IsFloatingPoint(LocationOperand::cast(this)->representation());
550 }
551 
552 bool InstructionOperand::IsAnyRegister() const {
553  return IsAnyLocationOperand() &&
554  LocationOperand::cast(this)->location_kind() ==
555  LocationOperand::REGISTER;
556 }
557 
558 bool InstructionOperand::IsRegister() const {
559  return IsAnyRegister() &&
560  !IsFloatingPoint(LocationOperand::cast(this)->representation());
561 }
562 
563 bool InstructionOperand::IsFPRegister() const {
564  return IsAnyRegister() &&
565  IsFloatingPoint(LocationOperand::cast(this)->representation());
566 }
567 
568 bool InstructionOperand::IsFloatRegister() const {
569  return IsAnyRegister() && LocationOperand::cast(this)->representation() ==
570  MachineRepresentation::kFloat32;
571 }
572 
573 bool InstructionOperand::IsDoubleRegister() const {
574  return IsAnyRegister() && LocationOperand::cast(this)->representation() ==
575  MachineRepresentation::kFloat64;
576 }
577 
578 bool InstructionOperand::IsSimd128Register() const {
579  return IsAnyRegister() && LocationOperand::cast(this)->representation() ==
580  MachineRepresentation::kSimd128;
581 }
582 
583 bool InstructionOperand::IsAnyStackSlot() const {
584  return IsAnyLocationOperand() &&
585  LocationOperand::cast(this)->location_kind() ==
586  LocationOperand::STACK_SLOT;
587 }
588 
589 bool InstructionOperand::IsStackSlot() const {
590  return IsAnyStackSlot() &&
591  !IsFloatingPoint(LocationOperand::cast(this)->representation());
592 }
593 
594 bool InstructionOperand::IsFPStackSlot() const {
595  return IsAnyStackSlot() &&
596  IsFloatingPoint(LocationOperand::cast(this)->representation());
597 }
598 
599 bool InstructionOperand::IsFloatStackSlot() const {
600  return IsAnyLocationOperand() &&
601  LocationOperand::cast(this)->location_kind() ==
602  LocationOperand::STACK_SLOT &&
603  LocationOperand::cast(this)->representation() ==
604  MachineRepresentation::kFloat32;
605 }
606 
607 bool InstructionOperand::IsDoubleStackSlot() const {
608  return IsAnyLocationOperand() &&
609  LocationOperand::cast(this)->location_kind() ==
610  LocationOperand::STACK_SLOT &&
611  LocationOperand::cast(this)->representation() ==
612  MachineRepresentation::kFloat64;
613 }
614 
615 bool InstructionOperand::IsSimd128StackSlot() const {
616  return IsAnyLocationOperand() &&
617  LocationOperand::cast(this)->location_kind() ==
618  LocationOperand::STACK_SLOT &&
619  LocationOperand::cast(this)->representation() ==
620  MachineRepresentation::kSimd128;
621 }
622 
623 uint64_t InstructionOperand::GetCanonicalizedValue() const {
624  if (IsAnyLocationOperand()) {
625  MachineRepresentation canonical = MachineRepresentation::kNone;
626  if (IsFPRegister()) {
627  if (kSimpleFPAliasing) {
628  // We treat all FP register operands the same for simple aliasing.
629  canonical = MachineRepresentation::kFloat64;
630  } else {
631  // We need to distinguish FP register operands of different reps when
632  // aliasing is not simple (e.g. ARM).
633  canonical = LocationOperand::cast(this)->representation();
634  }
635  }
636  return InstructionOperand::KindField::update(
637  LocationOperand::RepresentationField::update(this->value_, canonical),
638  LocationOperand::EXPLICIT);
639  }
640  return this->value_;
641 }
642 
643 // Required for maps that don't care about machine type.
// NOTE(review): the struct-declaration line (original line 644) is missing
// from this capture — recover the comparator's name from upstream.
645  bool operator()(const InstructionOperand& a,
646  const InstructionOperand& b) const {
// Canonicalized ordering ignores EXPLICIT/ALLOCATED kind differences and
// (with simple FP aliasing) FP representation differences.
647  return a.CompareCanonicalized(b);
648  }
649 };
650 
651 class V8_EXPORT_PRIVATE MoveOperands final
652  : public NON_EXPORTED_BASE(ZoneObject) {
653  public:
654  MoveOperands(const InstructionOperand& source,
655  const InstructionOperand& destination)
656  : source_(source), destination_(destination) {
657  DCHECK(!source.IsInvalid() && !destination.IsInvalid());
658  }
659 
660  const InstructionOperand& source() const { return source_; }
661  InstructionOperand& source() { return source_; }
662  void set_source(const InstructionOperand& operand) { source_ = operand; }
663 
664  const InstructionOperand& destination() const { return destination_; }
665  InstructionOperand& destination() { return destination_; }
666  void set_destination(const InstructionOperand& operand) {
667  destination_ = operand;
668  }
669 
670  // The gap resolver marks moves as "in-progress" by clearing the
671  // destination (but not the source).
672  bool IsPending() const {
673  return destination_.IsInvalid() && !source_.IsInvalid();
674  }
675  void SetPending() { destination_ = InstructionOperand(); }
676 
677  // A move is redundant if it's been eliminated or if its source and
678  // destination are the same.
679  bool IsRedundant() const {
680  DCHECK_IMPLIES(!destination_.IsInvalid(), !destination_.IsConstant());
681  return IsEliminated() || source_.EqualsCanonicalized(destination_);
682  }
683 
684  // We clear both operands to indicate move that's been eliminated.
685  void Eliminate() { source_ = destination_ = InstructionOperand(); }
686  bool IsEliminated() const {
687  DCHECK_IMPLIES(source_.IsInvalid(), destination_.IsInvalid());
688  return source_.IsInvalid();
689  }
690 
691  // APIs to aid debugging. For general-stream APIs, use operator<<.
692  void Print() const;
693 
694  private:
695  InstructionOperand source_;
696  InstructionOperand destination_;
697 
698  DISALLOW_COPY_AND_ASSIGN(MoveOperands);
699 };
700 
701 std::ostream& operator<<(std::ostream&, const MoveOperands&);
702 
703 class V8_EXPORT_PRIVATE ParallelMove final
704  : public NON_EXPORTED_BASE(ZoneVector<MoveOperands*>),
705  public NON_EXPORTED_BASE(ZoneObject) {
706  public:
707  explicit ParallelMove(Zone* zone) : ZoneVector<MoveOperands*>(zone) {
708  reserve(4);
709  }
710 
711  MoveOperands* AddMove(const InstructionOperand& from,
712  const InstructionOperand& to) {
713  Zone* zone = get_allocator().zone();
714  return AddMove(from, to, zone);
715  }
716 
717  MoveOperands* AddMove(const InstructionOperand& from,
718  const InstructionOperand& to,
719  Zone* operand_allocation_zone) {
720  MoveOperands* move = new (operand_allocation_zone) MoveOperands(from, to);
721  push_back(move);
722  return move;
723  }
724 
725  bool IsRedundant() const;
726 
727  // Prepare this ParallelMove to insert move as if it happened in a subsequent
728  // ParallelMove. move->source() may be changed. Any MoveOperands added to
729  // to_eliminate must be Eliminated.
730  void PrepareInsertAfter(MoveOperands* move,
731  ZoneVector<MoveOperands*>* to_eliminate) const;
732 
733  private:
734  DISALLOW_COPY_AND_ASSIGN(ParallelMove);
735 };
736 
737 std::ostream& operator<<(std::ostream&, const ParallelMove&);
738 
739 class ReferenceMap final : public ZoneObject {
740  public:
741  explicit ReferenceMap(Zone* zone)
742  : reference_operands_(8, zone), instruction_position_(-1) {}
743 
744  const ZoneVector<InstructionOperand>& reference_operands() const {
745  return reference_operands_;
746  }
747  int instruction_position() const { return instruction_position_; }
748 
749  void set_instruction_position(int pos) {
750  DCHECK_EQ(-1, instruction_position_);
751  instruction_position_ = pos;
752  }
753 
754  void RecordReference(const AllocatedOperand& op);
755 
756  private:
757  friend std::ostream& operator<<(std::ostream&, const ReferenceMap&);
758 
759  ZoneVector<InstructionOperand> reference_operands_;
760  int instruction_position_;
761 };
762 
763 std::ostream& operator<<(std::ostream&, const ReferenceMap&);
764 
765 class InstructionBlock;
766 
// A single backend instruction: an InstructionCode plus its outputs, inputs
// and temps stored inline in operands_, optional gap ParallelMoves, and an
// optional ReferenceMap for GC.
767 class V8_EXPORT_PRIVATE Instruction final {
768  public:
// Operands are laid out contiguously in operands_: outputs, then inputs,
// then temps (see the offsets in InputAt/TempAt).
769  size_t OutputCount() const { return OutputCountField::decode(bit_field_); }
770  const InstructionOperand* OutputAt(size_t i) const {
771  DCHECK(i < OutputCount());
772  return &operands_[i];
773  }
774  InstructionOperand* OutputAt(size_t i) {
775  DCHECK(i < OutputCount());
776  return &operands_[i];
777  }
778 
779  bool HasOutput() const { return OutputCount() > 0; }
780  const InstructionOperand* Output() const { return OutputAt(0); }
781  InstructionOperand* Output() { return OutputAt(0); }
782 
783  size_t InputCount() const { return InputCountField::decode(bit_field_); }
784  const InstructionOperand* InputAt(size_t i) const {
785  DCHECK(i < InputCount());
786  return &operands_[OutputCount() + i];
787  }
788  InstructionOperand* InputAt(size_t i) {
789  DCHECK(i < InputCount());
790  return &operands_[OutputCount() + i];
791  }
792 
793  size_t TempCount() const { return TempCountField::decode(bit_field_); }
794  const InstructionOperand* TempAt(size_t i) const {
795  DCHECK(i < TempCount());
796  return &operands_[OutputCount() + InputCount() + i];
797  }
798  InstructionOperand* TempAt(size_t i) {
799  DCHECK(i < TempCount());
800  return &operands_[OutputCount() + InputCount() + i];
801  }
802 
// Sub-fields packed into the InstructionCode (see instruction-codes.h).
803  InstructionCode opcode() const { return opcode_; }
804  ArchOpcode arch_opcode() const { return ArchOpcodeField::decode(opcode()); }
805  AddressingMode addressing_mode() const {
806  return AddressingModeField::decode(opcode());
807  }
808  FlagsMode flags_mode() const { return FlagsModeField::decode(opcode()); }
809  FlagsCondition flags_condition() const {
810  return FlagsConditionField::decode(opcode());
811  }
812 
// Convenience factory for an operand-less instruction.
813  static Instruction* New(Zone* zone, InstructionCode opcode) {
814  return New(zone, opcode, 0, nullptr, 0, nullptr, 0, nullptr);
815  }
816 
// Allocates the Instruction and its operand array in one zone block and
// copies the given operands inline after the fixed fields.
817  static Instruction* New(Zone* zone, InstructionCode opcode,
818  size_t output_count, InstructionOperand* outputs,
819  size_t input_count, InstructionOperand* inputs,
820  size_t temp_count, InstructionOperand* temps) {
821  DCHECK_LE(0, opcode);
822  DCHECK(output_count == 0 || outputs != nullptr);
823  DCHECK(input_count == 0 || inputs != nullptr);
824  DCHECK(temp_count == 0 || temps != nullptr);
825  // TODO(jarin/mstarzinger): Handle this gracefully. See crbug.com/582702.
826  CHECK(InputCountField::is_valid(input_count));
827 
// operands_[1] already reserves space for one operand, hence the decrement.
828  size_t total_extra_ops = output_count + input_count + temp_count;
829  if (total_extra_ops != 0) total_extra_ops--;
830  int size = static_cast<int>(
831  RoundUp(sizeof(Instruction), sizeof(InstructionOperand)) +
832  total_extra_ops * sizeof(InstructionOperand));
833  return new (zone->New(size)) Instruction(
834  opcode, output_count, outputs, input_count, inputs, temp_count, temps);
835  }
836 
837  Instruction* MarkAsCall() {
838  bit_field_ = IsCallField::update(bit_field_, true);
839  return this;
840  }
841  bool IsCall() const { return IsCallField::decode(bit_field_); }
842  bool NeedsReferenceMap() const { return IsCall(); }
843  bool HasReferenceMap() const { return reference_map_ != nullptr; }
844 
845  bool ClobbersRegisters() const { return IsCall(); }
846  bool ClobbersTemps() const { return IsCall(); }
847  bool ClobbersDoubleRegisters() const { return IsCall(); }
848  ReferenceMap* reference_map() const { return reference_map_; }
849 
// Only call instructions take a reference map, and only once.
850  void set_reference_map(ReferenceMap* map) {
851  DCHECK(NeedsReferenceMap());
852  DCHECK(!reference_map_);
853  reference_map_ = map;
854  }
855 
// Resets opcode/flags to kArchNop; operand counts in bit_field_ are zeroed,
// so previously stored operands become unreachable.
856  void OverwriteWithNop() {
857  opcode_ = ArchOpcodeField::encode(kArchNop);
858  bit_field_ = 0;
859  reference_map_ = nullptr;
860  }
861 
862  bool IsNop() const { return arch_opcode() == kArchNop; }
863 
864  bool IsDeoptimizeCall() const {
865  return arch_opcode() == ArchOpcode::kArchDeoptimize ||
866  FlagsModeField::decode(opcode()) == kFlags_deoptimize ||
867  FlagsModeField::decode(opcode()) == kFlags_deoptimize_and_poison;
868  }
869 
870  bool IsTrap() const {
871  return FlagsModeField::decode(opcode()) == kFlags_trap;
872  }
873 
874  bool IsJump() const { return arch_opcode() == ArchOpcode::kArchJmp; }
875  bool IsRet() const { return arch_opcode() == ArchOpcode::kArchRet; }
876  bool IsTailCall() const {
877  return arch_opcode() == ArchOpcode::kArchTailCallCodeObject ||
878  arch_opcode() == ArchOpcode::kArchTailCallCodeObjectFromJSFunction ||
879  arch_opcode() == ArchOpcode::kArchTailCallAddress ||
880  arch_opcode() == ArchOpcode::kArchTailCallWasm;
881  }
882  bool IsThrow() const {
883  return arch_opcode() == ArchOpcode::kArchThrowTerminator;
884  }
885 
// Each instruction has two gaps for parallel moves: before (START) and
// after (END) its execution.
886  enum GapPosition {
887  START,
888  END,
889  FIRST_GAP_POSITION = START,
890  LAST_GAP_POSITION = END
891  };
892 
893  ParallelMove* GetOrCreateParallelMove(GapPosition pos, Zone* zone) {
894  if (parallel_moves_[pos] == nullptr) {
895  parallel_moves_[pos] = new (zone) ParallelMove(zone);
896  }
897  return parallel_moves_[pos];
898  }
899 
900  ParallelMove* GetParallelMove(GapPosition pos) {
901  return parallel_moves_[pos];
902  }
903 
904  const ParallelMove* GetParallelMove(GapPosition pos) const {
905  return parallel_moves_[pos];
906  }
907 
908  bool AreMovesRedundant() const;
909 
910  ParallelMove* const* parallel_moves() const { return &parallel_moves_[0]; }
911  ParallelMove** parallel_moves() { return &parallel_moves_[0]; }
912 
913  // The block_id may be invalidated in JumpThreading. It is only important for
914  // register allocation, to avoid searching for blocks from instruction
915  // indexes.
916  InstructionBlock* block() const { return block_; }
917  void set_block(InstructionBlock* block) {
918  DCHECK_NOT_NULL(block);
919  block_ = block;
920  }
921 
922  // APIs to aid debugging. For general-stream APIs, use operator<<.
923  void Print() const;
924 
// NOTE(review): original lines 925-927 are missing from this capture; they
// presumably declare the OutputCountField/InputCountField/TempCountField
// (and IsCallField) bit fields referenced above and below — confirm upstream.
928 
929  static const size_t kMaxOutputCount = OutputCountField::kMax;
930  static const size_t kMaxInputCount = InputCountField::kMax;
931  static const size_t kMaxTempCount = TempCountField::kMax;
932 
933  private:
934  explicit Instruction(InstructionCode opcode);
935 
936  Instruction(InstructionCode opcode, size_t output_count,
937  InstructionOperand* outputs, size_t input_count,
938  InstructionOperand* inputs, size_t temp_count,
939  InstructionOperand* temps);
940 
// NOTE(review): original line 941 is also missing from this capture.
942 
943  InstructionCode opcode_;
// Packs the output/input/temp counts and the is-call flag.
944  uint32_t bit_field_;
945  ParallelMove* parallel_moves_[2];
946  ReferenceMap* reference_map_;
947  InstructionBlock* block_;
// Trailing inline array; New() over-allocates so further operands follow
// this first element directly in memory.
948  InstructionOperand operands_[1];
949 
950  DISALLOW_COPY_AND_ASSIGN(Instruction);
951 };
952 
953 std::ostream& operator<<(std::ostream&, const Instruction&);
954 
955 class RpoNumber final {
956  public:
957  static const int kInvalidRpoNumber = -1;
958  int ToInt() const {
959  DCHECK(IsValid());
960  return index_;
961  }
962  size_t ToSize() const {
963  DCHECK(IsValid());
964  return static_cast<size_t>(index_);
965  }
966  bool IsValid() const { return index_ >= 0; }
967  static RpoNumber FromInt(int index) { return RpoNumber(index); }
968  static RpoNumber Invalid() { return RpoNumber(kInvalidRpoNumber); }
969 
970  bool IsNext(const RpoNumber other) const {
971  DCHECK(IsValid());
972  return other.index_ == this->index_ + 1;
973  }
974 
975  // Comparison operators.
976  bool operator==(RpoNumber other) const { return index_ == other.index_; }
977  bool operator!=(RpoNumber other) const { return index_ != other.index_; }
978  bool operator>(RpoNumber other) const { return index_ > other.index_; }
979  bool operator<(RpoNumber other) const { return index_ < other.index_; }
980  bool operator<=(RpoNumber other) const { return index_ <= other.index_; }
981  bool operator>=(RpoNumber other) const { return index_ >= other.index_; }
982 
983  private:
984  explicit RpoNumber(int32_t index) : index_(index) {}
985  int32_t index_;
986 };
987 
988 std::ostream& operator<<(std::ostream&, const RpoNumber&);
989 
// A tagged union of the constant kinds the backend can materialize. The
// payload is always stored bit-cast into a single int64_t (value_); the
// accessors below reinterpret it according to type_.
990 class V8_EXPORT_PRIVATE Constant final {
991  public:
992  enum Type {
993  kInt32,
994  kInt64,
995  kFloat32,
996  kFloat64,
997  kExternalReference,
998  kHeapObject,
999  kRpoNumber,
1000  kDelayedStringConstant
1001  };
1002 
 // Constructors store the payload bit-exactly (floats/doubles/pointers are
 // bit_cast, not value-converted). The int32_t and RelocatablePtrConstantInfo
 // overloads are defined out of line (they also set rmode_).
1003  explicit Constant(int32_t v);
1004  explicit Constant(int64_t v) : type_(kInt64), value_(v) {}
1005  explicit Constant(float v) : type_(kFloat32), value_(bit_cast<int32_t>(v)) {}
1006  explicit Constant(double v) : type_(kFloat64), value_(bit_cast<int64_t>(v)) {}
1007  explicit Constant(ExternalReference ref)
1008  : type_(kExternalReference), value_(bit_cast<intptr_t>(ref.address())) {}
1009  explicit Constant(Handle<HeapObject> obj)
1010  : type_(kHeapObject), value_(bit_cast<intptr_t>(obj)) {}
1011  explicit Constant(RpoNumber rpo) : type_(kRpoNumber), value_(rpo.ToInt()) {}
1012  explicit Constant(const StringConstantBase* str)
1013  : type_(kDelayedStringConstant), value_(bit_cast<intptr_t>(str)) {}
1014  explicit Constant(RelocatablePtrConstantInfo info);
1015 
1016  Type type() const { return type_; }
1017 
1018  RelocInfo::Mode rmode() const { return rmode_; }
1019 
 // Also accepts kInt64 constants, but debug-checks that the value actually
 // fits in 32 bits (no silent truncation).
1020  int32_t ToInt32() const {
1021  DCHECK(type() == kInt32 || type() == kInt64);
1022  const int32_t value = static_cast<int32_t>(value_);
1023  DCHECK_EQ(value_, static_cast<int64_t>(value));
1024  return value;
1025  }
1026 
1027  int64_t ToInt64() const {
1028  if (type() == kInt32) return ToInt32();
1029  DCHECK_EQ(kInt64, type());
1030  return value_;
1031  }
1032 
1033  float ToFloat32() const {
1034  // TODO(ahaas): We should remove this function. If value_ has the bit
1035  // representation of a signalling NaN, then returning it as float can cause
1036  // the signalling bit to flip, and value_ is returned as a quiet NaN.
1037  DCHECK_EQ(kFloat32, type());
1038  return bit_cast<float>(static_cast<int32_t>(value_));
1039  }
1040 
 // NaN-safe alternative to ToFloat32(): returns the raw bit pattern.
1041  uint32_t ToFloat32AsInt() const {
1042  DCHECK_EQ(kFloat32, type());
1043  return bit_cast<uint32_t>(static_cast<int32_t>(value_));
1044  }
1045 
 // Returned as Double (a bit-pattern wrapper) rather than double, again to
 // avoid signalling-NaN quieting.
1046  Double ToFloat64() const {
1047  DCHECK_EQ(kFloat64, type());
1048  return Double(bit_cast<uint64_t>(value_));
1049  }
1050 
1051  ExternalReference ToExternalReference() const {
1052  DCHECK_EQ(kExternalReference, type());
1053  return ExternalReference::FromRawAddress(static_cast<Address>(value_));
1054  }
1055 
1056  RpoNumber ToRpoNumber() const {
1057  DCHECK_EQ(kRpoNumber, type());
1058  return RpoNumber::FromInt(static_cast<int>(value_));
1059  }
1060 
1061  Handle<HeapObject> ToHeapObject() const;
1062  Handle<Code> ToCode() const;
1063  const StringConstantBase* ToDelayedStringConstant() const;
1064 
1065  private:
1066  Type type_;
1067  RelocInfo::Mode rmode_ = RelocInfo::NONE;
 // Bit-cast payload for all constant kinds; interpretation depends on type_.
1068  int64_t value_;
1069 };
1070 
1071 std::ostream& operator<<(std::ostream&, const Constant&);
1072 
1073 // Forward declarations.
1074 class FrameStateDescriptor;
1075 
// Kind tag for StateValueDescriptor entries in a frame state:
//  kArgumentsElements/kArgumentsLength carry an ArgumentsStateType,
//  kPlain carries a MachineType, kNested/kDuplicate carry an id into the
//  nested-list space, and kOptimizedOut marks a value elided by the
//  optimizer.
1076 enum class StateValueKind : uint8_t {
1077  kArgumentsElements,
1078  kArgumentsLength,
1079  kPlain,
1080  kOptimizedOut,
1081  kNested,
1082  kDuplicate
1083 };
1084 
1086  public:
1088  : kind_(StateValueKind::kPlain), type_(MachineType::AnyTagged()) {}
1089 
 // Named factories — one per StateValueKind. Each sets the kind plus the
 // matching union member (args_type_ or id_); non-plain kinds default the
 // machine type to AnyTagged.
1090  static StateValueDescriptor ArgumentsElements(ArgumentsStateType type) {
1091  StateValueDescriptor descr(StateValueKind::kArgumentsElements,
1092  MachineType::AnyTagged());
1093  descr.args_type_ = type;
1094  return descr;
1095  }
1096  static StateValueDescriptor ArgumentsLength(ArgumentsStateType type) {
1097  StateValueDescriptor descr(StateValueKind::kArgumentsLength,
1098  MachineType::AnyTagged());
1099  descr.args_type_ = type;
1100  return descr;
1101  }
1102  static StateValueDescriptor Plain(MachineType type) {
1103  return StateValueDescriptor(StateValueKind::kPlain, type);
1104  }
1105  static StateValueDescriptor OptimizedOut() {
1106  return StateValueDescriptor(StateValueKind::kOptimizedOut,
1107  MachineType::AnyTagged());
1108  }
 // id identifies the nested StateValueList this entry refers to.
1109  static StateValueDescriptor Recursive(size_t id) {
1110  StateValueDescriptor descr(StateValueKind::kNested,
1111  MachineType::AnyTagged());
1112  descr.id_ = id;
1113  return descr;
1114  }
1115  static StateValueDescriptor Duplicate(size_t id) {
1116  StateValueDescriptor descr(StateValueKind::kDuplicate,
1117  MachineType::AnyTagged());
1118  descr.id_ = id;
1119  return descr;
1120  }
 
 // Kind predicates.
1122  bool IsArgumentsElements() const {
1123  return kind_ == StateValueKind::kArgumentsElements;
1124  }
1125  bool IsArgumentsLength() const {
1126  return kind_ == StateValueKind::kArgumentsLength;
1127  }
1128  bool IsPlain() const { return kind_ == StateValueKind::kPlain; }
1129  bool IsOptimizedOut() const { return kind_ == StateValueKind::kOptimizedOut; }
1130  bool IsNested() const { return kind_ == StateValueKind::kNested; }
1131  bool IsDuplicate() const { return kind_ == StateValueKind::kDuplicate; }
1132  MachineType type() const { return type_; }
 // Union accessors — debug-checked against the kinds that populate them.
1133  size_t id() const {
1134  DCHECK(kind_ == StateValueKind::kDuplicate ||
1135  kind_ == StateValueKind::kNested);
1136  return id_;
1137  }
1138  ArgumentsStateType arguments_type() const {
1139  DCHECK(kind_ == StateValueKind::kArgumentsElements ||
1140  kind_ == StateValueKind::kArgumentsLength);
1141  return args_type_;
1142  }
1143 
1144  private:
1145  StateValueDescriptor(StateValueKind kind, MachineType type)
1146  : kind_(kind), type_(type) {}
1147 
1148  StateValueKind kind_;
1149  MachineType type_;
1150  union {
1151  size_t id_;
1152  ArgumentsStateType args_type_;
1153  };
1154 };
1155 
1157  public:
1158  explicit StateValueList(Zone* zone) : fields_(zone), nested_(zone) {}
1159 
1160  size_t size() { return fields_.size(); }
1161 
1162  struct Value {
1163  StateValueDescriptor* desc;
1164  StateValueList* nested;
1165 
1166  Value(StateValueDescriptor* desc, StateValueList* nested)
1167  : desc(desc), nested(nested) {}
1168  };
1169 
1170  class iterator {
1171  public:
1172  // Bare minimum of operators needed for range iteration.
1173  bool operator!=(const iterator& other) const {
1174  return field_iterator != other.field_iterator;
1175  }
1176  bool operator==(const iterator& other) const {
1177  return field_iterator == other.field_iterator;
1178  }
1179  iterator& operator++() {
1180  if (field_iterator->IsNested()) {
1181  nested_iterator++;
1182  }
1183  ++field_iterator;
1184  return *this;
1185  }
1186  Value operator*() {
1187  StateValueDescriptor* desc = &(*field_iterator);
1188  StateValueList* nested = desc->IsNested() ? *nested_iterator : nullptr;
1189  return Value(desc, nested);
1190  }
1191 
1192  private:
1193  friend class StateValueList;
1194 
1197  : field_iterator(it), nested_iterator(nested) {}
1198 
1200  ZoneVector<StateValueList*>::iterator nested_iterator;
1201  };
1202 
1203  void ReserveSize(size_t size) { fields_.reserve(size); }
1204 
 // Appends a kNested entry and allocates its child list in the zone;
 // fields_ and nested_ stay in sync (one nested list per kNested field),
 // which is what the iterator relies on.
1205  StateValueList* PushRecursiveField(Zone* zone, size_t id) {
1206  fields_.push_back(StateValueDescriptor::Recursive(id));
1207  StateValueList* nested =
1208  new (zone->New(sizeof(StateValueList))) StateValueList(zone);
1209  nested_.push_back(nested);
1210  return nested;
1211  }
 // Convenience appenders, one per descriptor kind.
1212  void PushArgumentsElements(ArgumentsStateType type) {
1213  fields_.push_back(StateValueDescriptor::ArgumentsElements(type));
1214  }
1215  void PushArgumentsLength(ArgumentsStateType type) {
1216  fields_.push_back(StateValueDescriptor::ArgumentsLength(type));
1217  }
1218  void PushDuplicate(size_t id) {
1219  fields_.push_back(StateValueDescriptor::Duplicate(id));
1220  }
1221  void PushPlain(MachineType type) {
1222  fields_.push_back(StateValueDescriptor::Plain(type));
1223  }
1224  void PushOptimizedOut() {
1225  fields_.push_back(StateValueDescriptor::OptimizedOut());
1226  }
1227 
 // Range iteration over (descriptor, nested-list) pairs.
1228  iterator begin() { return iterator(fields_.begin(), nested_.begin()); }
1229  iterator end() { return iterator(fields_.end(), nested_.end()); }
1230 
1231  private:
1232  ZoneVector<StateValueDescriptor> fields_;
1233  ZoneVector<StateValueList*> nested_;
1234 };
1235 
1237  public:
1238  FrameStateDescriptor(Zone* zone, FrameStateType type, BailoutId bailout_id,
1239  OutputFrameStateCombine state_combine,
1240  size_t parameters_count, size_t locals_count,
1241  size_t stack_count,
1242  MaybeHandle<SharedFunctionInfo> shared_info,
1243  FrameStateDescriptor* outer_state = nullptr);
1244 
 // Simple accessors for the frame-state shape captured at construction.
1245  FrameStateType type() const { return type_; }
1246  BailoutId bailout_id() const { return bailout_id_; }
1247  OutputFrameStateCombine state_combine() const { return frame_state_combine_; }
1248  size_t parameters_count() const { return parameters_count_; }
1249  size_t locals_count() const { return locals_count_; }
1250  size_t stack_count() const { return stack_count_; }
1251  MaybeHandle<SharedFunctionInfo> shared_info() const { return shared_info_; }
 // Next-outer frame in an inlining chain, or nullptr for the outermost.
1252  FrameStateDescriptor* outer_state() const { return outer_state_; }
 // Whether this frame type records a context value (JS functions,
 // builtin continuations and construct stubs do).
1253  bool HasContext() const {
1254  return FrameStateFunctionInfo::IsJSFunctionType(type_) ||
1255  type_ == FrameStateType::kBuiltinContinuation ||
1256  type_ == FrameStateType::kConstructStub;
1257  }
1258 
1259  size_t GetSize() const;
1260  size_t GetTotalSize() const;
1261  size_t GetFrameCount() const;
1262  size_t GetJSFrameCount() const;
1263 
1264  StateValueList* GetStateValueDescriptors() { return &values_; }
1265 
1266  static const int kImpossibleValue = 0xdead;
1267 
1268  private:
1269  FrameStateType type_;
1270  BailoutId bailout_id_;
1271  OutputFrameStateCombine frame_state_combine_;
1272  size_t parameters_count_;
1273  size_t locals_count_;
1274  size_t stack_count_;
1275  StateValueList values_;
1276  MaybeHandle<SharedFunctionInfo> const shared_info_;
1277  FrameStateDescriptor* outer_state_;
1278 };
1279 
1280 // A deoptimization entry is a pair of the reason why we deoptimize and the
1281 // frame state descriptor that we have to go back to.
1282 class DeoptimizationEntry final {
1283  public:
1284  DeoptimizationEntry() = default;
1285  DeoptimizationEntry(FrameStateDescriptor* descriptor, DeoptimizeKind kind,
1286  DeoptimizeReason reason, VectorSlotPair const& feedback)
1287  : descriptor_(descriptor),
1288  kind_(kind),
1289  reason_(reason),
1290  feedback_(feedback) {}
1291 
1292  FrameStateDescriptor* descriptor() const { return descriptor_; }
1293  DeoptimizeKind kind() const { return kind_; }
1294  DeoptimizeReason reason() const { return reason_; }
1295  VectorSlotPair const& feedback() const { return feedback_; }
1296 
1297  private:
1298  FrameStateDescriptor* descriptor_ = nullptr;
1299  DeoptimizeKind kind_ = DeoptimizeKind::kEager;
1300  DeoptimizeReason reason_ = DeoptimizeReason::kUnknown;
1301  VectorSlotPair feedback_ = VectorSlotPair();
1302 };
1303 
1305 
// A phi in the instruction graph: one virtual register joined from one
// operand (input virtual register) per predecessor block.
// NOTE(review): this extraction dropped a declaration at original line 1309
// (between "public:" and the constructor) — compare against upstream.
1306 class V8_EXPORT_PRIVATE PhiInstruction final
1307  : public NON_EXPORTED_BASE(ZoneObject) {
1308  public:
1310 
1311  PhiInstruction(Zone* zone, int virtual_register, size_t input_count);
1312 
 // Set or rename the input virtual register for predecessor |offset|.
1313  void SetInput(size_t offset, int virtual_register);
1314  void RenameInput(size_t offset, int virtual_register);
1315 
1316  int virtual_register() const { return virtual_register_; }
1317  const IntVector& operands() const { return operands_; }
1318 
1319  // TODO(dcarney): this has no real business being here, since it's internal to
1320  // the register allocator, but putting it here was convenient.
1321  const InstructionOperand& output() const { return output_; }
1322  InstructionOperand& output() { return output_; }
1323 
1324  private:
1325  const int virtual_register_;
1326  InstructionOperand output_;
 // One input virtual register per predecessor, in predecessor order.
1327  IntVector operands_;
1328 };
1329 
1330 // Analogue of BasicBlock for Instructions instead of Nodes.
// Analogue of BasicBlock for Instructions instead of Nodes: holds the
// [code_start_, code_end_) instruction index range plus CFG links
// (predecessors/successors/phis), loop info, and frame bookkeeping flags.
1331 class V8_EXPORT_PRIVATE InstructionBlock final
1332  : public NON_EXPORTED_BASE(ZoneObject) {
1333  public:
1334  InstructionBlock(Zone* zone, RpoNumber rpo_number, RpoNumber loop_header,
1335  RpoNumber loop_end, bool deferred, bool handler);
1336 
1337  // Instruction indexes (used by the register allocator).
 // Both ends debug-check that the range has been set and is non-empty;
 // the range is half-open, so the last index is code_end_ - 1.
1338  int first_instruction_index() const {
1339  DCHECK_LE(0, code_start_);
1340  DCHECK_LT(0, code_end_);
1341  DCHECK_GE(code_end_, code_start_);
1342  return code_start_;
1343  }
1344  int last_instruction_index() const {
1345  DCHECK_LE(0, code_start_);
1346  DCHECK_LT(0, code_end_);
1347  DCHECK_GE(code_end_, code_start_);
1348  return code_end_ - 1;
1349  }
1350 
1351  int32_t code_start() const { return code_start_; }
1352  void set_code_start(int32_t start) { code_start_ = start; }
1353 
1354  int32_t code_end() const { return code_end_; }
1355  void set_code_end(int32_t end) { code_end_ = end; }
1356 
1357  bool IsDeferred() const { return deferred_; }
1358  bool IsHandler() const { return handler_; }
1359 
 // ao_number is the assembly order; rpo_number the reverse-post-order id.
1360  RpoNumber ao_number() const { return ao_number_; }
1361  RpoNumber rpo_number() const { return rpo_number_; }
1362  RpoNumber loop_header() const { return loop_header_; }
1363  RpoNumber loop_end() const {
1364  DCHECK(IsLoopHeader());
1365  return loop_end_;
1366  }
 // A block is a loop header iff a loop end was recorded for it.
1367  inline bool IsLoopHeader() const { return loop_end_.IsValid(); }
1368  inline bool ShouldAlign() const { return alignment_; }
1369 
1371  Predecessors& predecessors() { return predecessors_; }
1372  const Predecessors& predecessors() const { return predecessors_; }
1373  size_t PredecessorCount() const { return predecessors_.size(); }
1374  size_t PredecessorIndexOf(RpoNumber rpo_number) const;
1375 
1377  Successors& successors() { return successors_; }
1378  const Successors& successors() const { return successors_; }
1379  size_t SuccessorCount() const { return successors_.size(); }
1380 
1382  const PhiInstructions& phis() const { return phis_; }
1383  PhiInstruction* PhiAt(size_t i) const { return phis_[i]; }
1384  void AddPhi(PhiInstruction* phi) { phis_.push_back(phi); }
1385 
1386  void set_ao_number(RpoNumber ao_number) { ao_number_ = ao_number; }
1387 
1388  void set_alignment(bool val) { alignment_ = val; }
1389 
 // Frame bookkeeping: once set, these flags are never cleared.
1390  bool needs_frame() const { return needs_frame_; }
1391  void mark_needs_frame() { needs_frame_ = true; }
1392 
1393  bool must_construct_frame() const { return must_construct_frame_; }
1394  void mark_must_construct_frame() { must_construct_frame_ = true; }
1395 
1396  bool must_deconstruct_frame() const { return must_deconstruct_frame_; }
1397  void mark_must_deconstruct_frame() { must_deconstruct_frame_ = true; }
1398 
1399  private:
1400  Successors successors_;
1401  Predecessors predecessors_;
1402  PhiInstructions phis_;
1403  RpoNumber ao_number_; // Assembly order number.
1404  const RpoNumber rpo_number_;
1405  const RpoNumber loop_header_;
1406  const RpoNumber loop_end_;
1407  int32_t code_start_; // start index of arch-specific code.
1408  int32_t code_end_ = -1; // end index of arch-specific code.
 // NOTE(review): "= -1" on a const bool is suspect (converts to true and is
 // overridden by the constructor's mem-initializer anyway); upstream V8
 // declares this member without an initializer — likely an extraction
 // artifact duplicated from the line above. TODO: confirm against upstream.
1409  const bool deferred_ = -1; // Block contains deferred code.
1410  const bool handler_; // Block is a handler entry point.
1411  bool alignment_ = false; // insert alignment before this block
1412  bool needs_frame_ = false;
1413  bool must_construct_frame_ = false;
1414  bool must_deconstruct_frame_ = false;
1415 };
1416 
1417 class InstructionSequence;
1418 
1420  const InstructionBlock* block_;
1421  const InstructionSequence* code_;
1422 };
1423 
1424 std::ostream& operator<<(std::ostream&, const PrintableInstructionBlock&);
1425 
1427 typedef std::map<int, Constant, std::less<int>,
1429  ConstantMap;
1430 
1434 
1435 // Represents architecture-specific generated code before, during, and after
1436 // register allocation.
1437 class V8_EXPORT_PRIVATE InstructionSequence final
1438  : public NON_EXPORTED_BASE(ZoneObject) {
1439  public:
1440  static InstructionBlocks* InstructionBlocksFor(Zone* zone,
1441  const Schedule* schedule);
1442  InstructionSequence(Isolate* isolate, Zone* zone,
1443  InstructionBlocks* instruction_blocks);
1444 
1445  int NextVirtualRegister();
1446  int VirtualRegisterCount() const { return next_virtual_register_; }
1447 
1448  const InstructionBlocks& instruction_blocks() const {
1449  return *instruction_blocks_;
1450  }
1451 
1452  const InstructionBlocks& ao_blocks() const { return *ao_blocks_; }
1453 
1454  int InstructionBlockCount() const {
1455  return static_cast<int>(instruction_blocks_->size());
1456  }
1457 
1458  InstructionBlock* InstructionBlockAt(RpoNumber rpo_number) {
1459  return instruction_blocks_->at(rpo_number.ToSize());
1460  }
1461 
1462  int LastLoopInstructionIndex(const InstructionBlock* block) {
1463  return instruction_blocks_->at(block->loop_end().ToSize() - 1)
1464  ->last_instruction_index();
1465  }
1466 
1467  const InstructionBlock* InstructionBlockAt(RpoNumber rpo_number) const {
1468  return instruction_blocks_->at(rpo_number.ToSize());
1469  }
1470 
1471  InstructionBlock* GetInstructionBlock(int instruction_index) const;
1472 
1473  static MachineRepresentation DefaultRepresentation() {
1474  return MachineType::PointerRepresentation();
1475  }
1476  MachineRepresentation GetRepresentation(int virtual_register) const;
1477  void MarkAsRepresentation(MachineRepresentation rep, int virtual_register);
1478 
 // Per-virtual-register representation queries, backed by
 // GetRepresentation()/representation_mask_.
1479  bool IsReference(int virtual_register) const {
1480  return CanBeTaggedPointer(GetRepresentation(virtual_register));
1481  }
1482  bool IsFP(int virtual_register) const {
1483  return IsFloatingPoint(GetRepresentation(virtual_register));
1484  }
1485  int representation_mask() const { return representation_mask_; }
 // Cheap whole-sequence check: tests the accumulated representation bit
 // mask instead of scanning virtual registers.
1486  bool HasFPVirtualRegisters() const {
1487  constexpr int kFPRepMask =
1488  RepresentationBit(MachineRepresentation::kFloat32) |
1489  RepresentationBit(MachineRepresentation::kFloat64) |
1490  RepresentationBit(MachineRepresentation::kSimd128);
1491  return (representation_mask() & kFPRepMask) != 0;
1492  }
1493 
1494  Instruction* GetBlockStart(RpoNumber rpo) const;
1495 
1496  typedef InstructionDeque::const_iterator const_iterator;
1497  const_iterator begin() const { return instructions_.begin(); }
1498  const_iterator end() const { return instructions_.end(); }
1499  const InstructionDeque& instructions() const { return instructions_; }
1500  int LastInstructionIndex() const {
1501  return static_cast<int>(instructions().size()) - 1;
1502  }
1503 
1504  Instruction* InstructionAt(int index) const {
1505  DCHECK_LE(0, index);
1506  DCHECK_GT(instructions_.size(), index);
1507  return instructions_[index];
1508  }
1509 
1510  Isolate* isolate() const { return isolate_; }
1511  const ReferenceMapDeque* reference_maps() const { return &reference_maps_; }
1512  Zone* zone() const { return zone_; }
1513 
1514  // Used by the instruction selector while adding instructions.
1515  int AddInstruction(Instruction* instr);
1516  void StartBlock(RpoNumber rpo);
1517  void EndBlock(RpoNumber rpo);
1518 
 // Registers |constant| as the value of |virtual_register|. Each virtual
 // register may be bound at most once, and RPO-number constants are
 // (currently) disallowed here.
1519  int AddConstant(int virtual_register, Constant constant) {
1520  // TODO(titzer): allow RPO numbers as constants?
1521  DCHECK_NE(Constant::kRpoNumber, constant.type());
1522  DCHECK(virtual_register >= 0 && virtual_register < next_virtual_register_);
1523  DCHECK(constants_.find(virtual_register) == constants_.end());
1524  constants_.insert(std::make_pair(virtual_register, constant));
1525  return virtual_register;
1526  }
 // Looks up a constant previously bound via AddConstant; must exist.
1527  Constant GetConstant(int virtual_register) const {
1528  ConstantMap::const_iterator it = constants_.find(virtual_register);
1529  DCHECK(it != constants_.end());
1530  DCHECK_EQ(virtual_register, it->first);
1531  return it->second;
1532  }
1533 
1535  Immediates& immediates() { return immediates_; }
1536 
 // Small reloc-free int32s are encoded inline in the operand itself;
 // everything else is appended to the immediates pool and referenced by
 // index.
1537  ImmediateOperand AddImmediate(const Constant& constant) {
1538  if (constant.type() == Constant::kInt32 &&
1539  RelocInfo::IsNone(constant.rmode())) {
1540  return ImmediateOperand(ImmediateOperand::INLINE, constant.ToInt32());
1541  }
1542  int index = static_cast<int>(immediates_.size());
1543  immediates_.push_back(constant);
1544  return ImmediateOperand(ImmediateOperand::INDEXED, index);
1545  }
1546 
 // Inverse of AddImmediate: rebuilds/fetches the Constant for an operand.
1547  Constant GetImmediate(const ImmediateOperand* op) const {
1548  switch (op->type()) {
1549  case ImmediateOperand::INLINE:
1550  return Constant(op->inline_value());
1551  case ImmediateOperand::INDEXED: {
1552  int index = op->indexed_value();
1553  DCHECK_LE(0, index);
1554  DCHECK_GT(immediates_.size(), index);
1555  return immediates_[index];
1556  }
1557  }
1558  UNREACHABLE();
1559  }
1560 
1561  int AddDeoptimizationEntry(FrameStateDescriptor* descriptor,
1562  DeoptimizeKind kind, DeoptimizeReason reason,
1563  VectorSlotPair const& feedback);
1564  DeoptimizationEntry const& GetDeoptimizationEntry(int deoptimization_id);
1565  int GetDeoptimizationEntryCount() const {
1566  return static_cast<int>(deoptimization_entries_.size());
1567  }
1568 
1569  RpoNumber InputRpo(Instruction* instr, size_t index);
1570 
1571  bool GetSourcePosition(const Instruction* instr,
1572  SourcePosition* result) const;
1573  void SetSourcePosition(const Instruction* instr, SourcePosition value);
1574 
 // Linear scan over all instructions; true if any is a call. O(n), intended
 // for one-off queries rather than hot paths.
1575  bool ContainsCall() const {
1576  for (Instruction* instr : instructions_) {
1577  if (instr->IsCall()) return true;
1578  }
1579  return false;
1580  }
1581 
1582  // APIs to aid debugging. For general-stream APIs, use operator<<.
1583  void Print() const;
1584 
1585  void PrintBlock(int block_id) const;
1586 
1587  void ValidateEdgeSplitForm() const;
1588  void ValidateDeferredBlockExitPaths() const;
1589  void ValidateDeferredBlockEntryPaths() const;
1590  void ValidateSSA() const;
1591 
1592  static void SetRegisterConfigurationForTesting(
1593  const RegisterConfiguration* regConfig);
1594  static void ClearRegisterConfigurationForTesting();
1595 
1596  void RecomputeAssemblyOrderForTesting();
1597 
1598  private:
1599  friend V8_EXPORT_PRIVATE std::ostream& operator<<(std::ostream&,
1600  const InstructionSequence&);
1601 
1603 
1604  static const RegisterConfiguration* RegisterConfigurationForTesting();
1605  static const RegisterConfiguration* registerConfigurationForTesting_;
1606 
1607  // Puts the deferred blocks last and may rotate loops.
1608  void ComputeAssemblyOrder();
1609 
1610  Isolate* isolate_;
1611  Zone* const zone_;
1612  InstructionBlocks* const instruction_blocks_;
1613  InstructionBlocks* ao_blocks_;
1614  SourcePositionMap source_positions_;
1615  ConstantMap constants_;
1616  Immediates immediates_;
1617  InstructionDeque instructions_;
1618  int next_virtual_register_;
1619  ReferenceMapDeque reference_maps_;
1620  ZoneVector<MachineRepresentation> representations_;
1621  int representation_mask_;
1622  DeoptimizationVector deoptimization_entries_;
1623 
1624  // Used at construction time
1625  InstructionBlock* current_block_;
1626 
1627  DISALLOW_COPY_AND_ASSIGN(InstructionSequence);
1628 };
1629 
1630 V8_EXPORT_PRIVATE std::ostream& operator<<(std::ostream&,
1631  const InstructionSequence&);
1632 
1633 } // namespace compiler
1634 } // namespace internal
1635 } // namespace v8
1636 
1637 #endif // V8_COMPILER_BACKEND_INSTRUCTION_H_
Definition: libplatform.h:13