#ifndef V8_COMPILER_BACKEND_INSTRUCTION_SELECTOR_H_
#define V8_COMPILER_BACKEND_INSTRUCTION_SELECTOR_H_

#include "src/compiler/backend/instruction-scheduler.h"
#include "src/compiler/backend/instruction.h"
#include "src/compiler/common-operator.h"
#include "src/compiler/linkage.h"
#include "src/compiler/machine-operator.h"
#include "src/compiler/node.h"
#include "src/globals.h"
#include "src/zone/zone-containers.h"

class OperandGenerator;
class StateObjectDeduplicator;
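// The flags continuation is a way to combine a branch or a materialization of
// a boolean value with an instruction that sets the flags register. The whole
// instruction is treated as a unit by the register allocator, and thus no
// spills or moves can be introduced between the flags-setting instruction and
// the branch or set it should be combined with.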
                                         DeoptimizeReason reason,
                             feedback, frame_state);
      FlagsCondition condition, DeoptimizeKind kind, DeoptimizeReason reason,
                                 reason, feedback, frame_state);
  bool IsNone() const { return mode_ == kFlags_none; }
  bool IsBranch() const {
    return mode_ == kFlags_branch || mode_ == kFlags_branch_and_poison;
  }
  bool IsDeoptimize() const {
    return mode_ == kFlags_deoptimize || mode_ == kFlags_deoptimize_and_poison;
  }
  bool IsPoisoned() const {
    return mode_ == kFlags_branch_and_poison ||
           mode_ == kFlags_deoptimize_and_poison;
  }
  bool IsSet() const { return mode_ == kFlags_set; }
  bool IsTrap() const { return mode_ == kFlags_trap; }
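  // The accessors below are only meaningful for the matching mode; they
  // DCHECK the expected mode before returning the corresponding field.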
  FlagsCondition condition() const {
    DCHECK(!IsNone());
    return condition_;
  }
  DeoptimizeKind kind() const {
    DCHECK(IsDeoptimize());
    return kind_;
  }
  DeoptimizeReason reason() const {
    DCHECK(IsDeoptimize());
    return reason_;
  }
  VectorSlotPair const& feedback() const {
    DCHECK(IsDeoptimize());
    return feedback_;
  }
  Node* frame_state() const {
    DCHECK(IsDeoptimize());
    return frame_state_or_result_;
  }
  Node* result() const {
    DCHECK(IsSet());
    return frame_state_or_result_;
  }
  TrapId trap_id() const {
    DCHECK(IsTrap());
    return trap_id_;
  }
  void Negate() { condition_ = NegateFlagsCondition(condition_); }
  void Commute() { condition_ = CommuteFlagsCondition(condition_); }
  void Overwrite(FlagsCondition condition) { condition_ = condition; }
  void OverwriteAndNegateIfEqual(FlagsCondition condition) {
    DCHECK(condition_ == kEqual || condition_ == kNotEqual);
    bool negate = condition_ == kEqual;
    condition_ = condition;
    if (negate) Negate();
  }
  void OverwriteUnsignedIfSigned() {
    switch (condition_) {
      case kSignedLessThan:
        condition_ = kUnsignedLessThan;
        break;
      case kSignedLessThanOrEqual:
        condition_ = kUnsignedLessThanOrEqual;
        break;
      case kSignedGreaterThan:
        condition_ = kUnsignedGreaterThan;
        break;
      case kSignedGreaterThanOrEqual:
        condition_ = kUnsignedGreaterThanOrEqual;
        break;
      default:
        break;
    }
  }
  // Encodes this flags continuation into the given opcode.
  InstructionCode Encode(InstructionCode opcode) {
    opcode |= FlagsModeField::encode(mode_);
    if (mode_ != kFlags_none) {
      opcode |= FlagsConditionField::encode(condition_);
    }
    return opcode;
  }
  FlagsContinuation(FlagsMode mode, FlagsCondition condition,
                    BasicBlock* true_block, BasicBlock* false_block)
      : mode_(mode),
        condition_(condition),
        true_block_(true_block),
        false_block_(false_block) {
    DCHECK(mode == kFlags_branch || mode == kFlags_branch_and_poison);
    DCHECK_NOT_NULL(true_block);
    DCHECK_NOT_NULL(false_block);
  }

  FlagsContinuation(FlagsMode mode, FlagsCondition condition,
                    DeoptimizeKind kind, DeoptimizeReason reason,
                    VectorSlotPair const& feedback, Node* frame_state)
      : mode_(mode), condition_(condition), kind_(kind), reason_(reason),
        feedback_(feedback), frame_state_or_result_(frame_state) {
    DCHECK(mode == kFlags_deoptimize || mode == kFlags_deoptimize_and_poison);
    DCHECK_NOT_NULL(frame_state);
  }

  FlagsContinuation(FlagsCondition condition, Node* result)
      : mode_(kFlags_set),
        condition_(condition),
        frame_state_or_result_(result) {
    DCHECK_NOT_NULL(result);
  }

  FlagsContinuation(FlagsCondition condition, TrapId trap_id, Node* result)
      : mode_(kFlags_trap),
        condition_(condition),
        frame_state_or_result_(result),
        trap_id_(trap_id) {
    DCHECK_NOT_NULL(result);
  }
  FlagsMode const mode_;
  FlagsCondition condition_;
  DeoptimizeKind kind_;          // Only valid if mode_ == kFlags_deoptimize*.
  DeoptimizeReason reason_;      // Only valid if mode_ == kFlags_deoptimize*.
  VectorSlotPair feedback_;      // Only valid if mode_ == kFlags_deoptimize*.
  Node* frame_state_or_result_;  // Only valid if mode_ == kFlags_deoptimize*
                                 // or mode_ == kFlags_set.
  BasicBlock* true_block_;       // Only valid if mode_ == kFlags_branch*.
  BasicBlock* false_block_;      // Only valid if mode_ == kFlags_branch*.
  TrapId trap_id_;               // Only valid if mode_ == kFlags_trap.
      : node(n), location(l) {}
enum class FrameStateInputKind { kAny, kStackSlot };
  enum SourcePositionMode { kCallSourcePositions, kAllSourcePositions };
  enum EnableScheduling { kDisableScheduling, kEnableScheduling };
  enum EnableRootsRelativeAddressing {
    kDisableRootsRelativeAddressing,
    kEnableRootsRelativeAddressing
  };
  enum EnableSwitchJumpTable {
    kDisableSwitchJumpTable,
    kEnableSwitchJumpTable
  };
  enum EnableTraceTurboJson { kDisableTraceTurboJson, kEnableTraceTurboJson };
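  // Each enum above selects one construction-time behavior of the instruction
  // selector and has a corresponding constructor parameter below.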
  InstructionSelector(
      Zone* zone, size_t node_count, Linkage* linkage,
      InstructionSequence* sequence, Schedule* schedule,
      SourcePositionTable* source_positions, Frame* frame,
      EnableSwitchJumpTable enable_switch_jump_table,
      SourcePositionMode source_position_mode = kCallSourcePositions,
      Features features = SupportedFeatures(),
      EnableScheduling enable_scheduling = FLAG_turbo_instruction_scheduling
                                               ? kEnableScheduling
                                               : kDisableScheduling,
      EnableRootsRelativeAddressing enable_roots_relative_addressing =
          kDisableRootsRelativeAddressing,
      PoisoningMitigationLevel poisoning_level =
          PoisoningMitigationLevel::kDontPoison,
      EnableTraceTurboJson trace_turbo = kDisableTraceTurboJson);
  // Visit code for the entire graph with the included schedule.
  bool SelectInstructions();
  Instruction* Emit(InstructionCode opcode, size_t output_count,
                    InstructionOperand* outputs, size_t input_count,
                    InstructionOperand* inputs, size_t temp_count = 0,
                    InstructionOperand* temps = nullptr);

  Instruction* EmitWithContinuation(InstructionCode opcode,
                                    FlagsContinuation* cont);
  Instruction* EmitWithContinuation(InstructionCode opcode, InstructionOperand a,
                                    FlagsContinuation* cont);
  Instruction* EmitWithContinuation(InstructionCode opcode, InstructionOperand a,
                                    InstructionOperand b, FlagsContinuation* cont);
  Instruction* EmitWithContinuation(InstructionCode opcode, InstructionOperand a,
                                    InstructionOperand b, InstructionOperand c,
                                    FlagsContinuation* cont);
  Instruction* EmitWithContinuation(InstructionCode opcode, size_t output_count,
                                    InstructionOperand* outputs, size_t input_count,
                                    InstructionOperand* inputs,
                                    FlagsContinuation* cont);

  Instruction* EmitDeoptimize(InstructionCode opcode, size_t output_count,
                              InstructionOperand* outputs, size_t input_count,
                              InstructionOperand* inputs, DeoptimizeKind kind,
                              DeoptimizeReason reason,
                              VectorSlotPair const& feedback, Node* frame_state);
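  // Features is a bit set of CpuFeature values; IsSupported() below uses it to
  // query whether a given CPU feature is available during selection.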
  explicit Features(unsigned bits) : bits_(bits) {}
  explicit Features(CpuFeature f) : bits_(1u << f) {}
  Features(CpuFeature f1, CpuFeature f2) : bits_((1u << f1) | (1u << f2)) {}

  bool Contains(CpuFeature f) const { return (bits_ & (1u << f)); }

  bool IsSupported(CpuFeature feature) const {
    return features_.Contains(feature);
  }

  static Features SupportedFeatures() {
    return Features(CpuFeatures::SupportedFeatures());
  }
  static MachineOperatorBuilder::Flags SupportedMachineOperatorFlags();
  static MachineOperatorBuilder::AlignmentRequirements AlignmentRequirements();

  bool NeedsPoisoning(IsSafetyCheck safety_check) const;
  bool CanCover(Node* user, Node* node) const;
  bool CanCoverTransitively(Node* user, Node* node, Node* node_input) const;

  bool IsOnlyUserOfNodeInSameBlock(Node* user, Node* node) const;
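  // Selection-state queries: a node is "defined" once code has been generated
  // for it, and "used" while code still has to be generated for at least one
  // of its uses; IsLive() below combines the two checks.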
  bool IsDefined(Node* node) const;
  bool IsUsed(Node* node) const;
  bool IsLive(Node* node) const { return !IsDefined(node) && IsUsed(node); }

  int GetEffectLevel(Node* node) const;

  int GetVirtualRegister(const Node* node);
  const std::map<NodeId, int> GetVirtualRegistersForTesting() const;
  bool CanAddressRelativeToRootsRegister() const;
  bool CanUseRootsRegister() const;

  Isolate* isolate() const { return sequence()->isolate(); }

  const ZoneVector<std::pair<int, int>>& instr_origins() const {
    return instr_origins_;
  }
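  // The ForTesting wrappers below expose the private static shuffle matchers
  // so unit tests can exercise them directly.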
  static void CanonicalizeShuffleForTesting(bool inputs_equal, uint8_t* shuffle,
                                            bool* needs_swap,
                                            bool* is_swizzle) {
    CanonicalizeShuffle(inputs_equal, shuffle, needs_swap, is_swizzle);
  }

  static bool TryMatchIdentityForTesting(const uint8_t* shuffle) {
    return TryMatchIdentity(shuffle);
  }

  template <int LANES>
  static bool TryMatchDupForTesting(const uint8_t* shuffle, int* index) {
    return TryMatchDup<LANES>(shuffle, index);
  }

  static bool TryMatch32x4ShuffleForTesting(const uint8_t* shuffle,
                                            uint8_t* shuffle32x4) {
    return TryMatch32x4Shuffle(shuffle, shuffle32x4);
  }

  static bool TryMatch16x8ShuffleForTesting(const uint8_t* shuffle,
                                            uint8_t* shuffle16x8) {
    return TryMatch16x8Shuffle(shuffle, shuffle16x8);
  }

  static bool TryMatchConcatForTesting(const uint8_t* shuffle,
                                       uint8_t* offset) {
    return TryMatchConcat(shuffle, offset);
  }

  static bool TryMatchBlendForTesting(const uint8_t* shuffle) {
    return TryMatchBlend(shuffle);
  }
  friend class OperandGenerator;

  bool UseInstructionScheduling() const {
    return (enable_scheduling_ == kEnableScheduling) &&
           InstructionScheduler::SchedulerSupported();
  }

  void AppendDeoptimizeArguments(InstructionOperandVector* args,
                                 DeoptimizeKind kind, DeoptimizeReason reason,
                                 VectorSlotPair const& feedback,
                                 Node* frame_state);
  void EmitTableSwitch(const SwitchInfo& sw,
                       InstructionOperand& index_operand);
  void EmitLookupSwitch(const SwitchInfo& sw,
                        InstructionOperand& value_operand);
  void EmitBinarySearchSwitch(const SwitchInfo& sw,
                              InstructionOperand& value_operand);

  void TryRename(InstructionOperand* op);
  int GetRename(int virtual_register);
  void SetRename(const Node* node, const Node* rename);
  void UpdateRenames(Instruction* instruction);
  void UpdateRenamesInPhi(PhiInstruction* phi);
  // Inform the instruction selection that {node} was just defined.
  void MarkAsDefined(Node* node);
  // Inform the instruction selection that {node} has at least one use.
  void MarkAsUsed(Node* node);

  void SetEffectLevel(Node* node, int effect_level);
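  // Inform the register allocation of the representation of the value
  // produced by {node}; the MarkAs<Type> helpers below cover the common
  // representations.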
  void MarkAsRepresentation(MachineRepresentation rep, Node* node);
  void MarkAsWord32(Node* node) {
    MarkAsRepresentation(MachineRepresentation::kWord32, node);
  }
  void MarkAsWord64(Node* node) {
    MarkAsRepresentation(MachineRepresentation::kWord64, node);
  }
  void MarkAsFloat32(Node* node) {
    MarkAsRepresentation(MachineRepresentation::kFloat32, node);
  }
  void MarkAsFloat64(Node* node) {
    MarkAsRepresentation(MachineRepresentation::kFloat64, node);
  }
  void MarkAsSimd128(Node* node) {
    MarkAsRepresentation(MachineRepresentation::kSimd128, node);
  }
  void MarkAsReference(Node* node) {
    MarkAsRepresentation(MachineRepresentation::kTagged, node);
  }
  void MarkAsRepresentation(MachineRepresentation rep,
                            const InstructionOperand& op);
  enum CallBufferFlag {
    kCallCodeImmediate = 1u << 0,
    kCallAddressImmediate = 1u << 1,
    kCallTail = 1u << 2,
    kCallFixedTargetRegister = 1u << 3,
    kAllowCallThroughSlot = 1u << 4
  };
  typedef base::Flags<CallBufferFlag> CallBufferFlags;

  void InitializeCallBuffer(Node* call, CallBuffer* buffer,
                            CallBufferFlags flags, bool is_tail_call,
                            int stack_slot_delta = 0);
  bool IsTailCallAddressImmediate();
  int GetTempsCountForTailCallFromJSFunction();
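  // The helpers below flatten a FrameState node into the deoptimization
  // inputs of the instruction being built.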
  FrameStateDescriptor* GetFrameStateDescriptor(Node* node);
  size_t AddInputsToFrameStateDescriptor(FrameStateDescriptor* descriptor,
                                         Node* state, OperandGenerator* g,
                                         StateObjectDeduplicator* deduplicator,
                                         InstructionOperandVector* inputs,
                                         FrameStateInputKind kind, Zone* zone);
  size_t AddOperandToStateValueDescriptor(StateValueList* values,
                                          InstructionOperandVector* inputs,
                                          OperandGenerator* g,
                                          StateObjectDeduplicator* deduplicator,
                                          Node* input, MachineType type,
                                          FrameStateInputKind kind, Zone* zone);
  void VisitBlock(BasicBlock* block);
  void VisitControl(BasicBlock* block);
  void VisitNode(Node* node);

  void VisitFloat64Ieee754Binop(Node*, InstructionCode code);
  void VisitFloat64Ieee754Unop(Node*, InstructionCode code);

#define DECLARE_GENERATOR(x) void Visit##x(Node* node);
  MACHINE_OP_LIST(DECLARE_GENERATOR)
  MACHINE_SIMD_OP_LIST(DECLARE_GENERATOR)
#undef DECLARE_GENERATOR

  void VisitFinishRegion(Node* node);
  void VisitParameter(Node* node);
  void VisitIfException(Node* node);
  void VisitOsrValue(Node* node);
  void VisitPhi(Node* node);
  void VisitProjection(Node* node);
  void VisitConstant(Node* node);
  void VisitCall(Node* call, BasicBlock* handler = nullptr);
  void VisitCallWithCallerSavedRegisters(Node* call,
                                         BasicBlock* handler = nullptr);
  void VisitDeoptimizeIf(Node* node);
  void VisitDeoptimizeUnless(Node* node);
  void VisitTrapIf(Node* node, TrapId trap_id);
  void VisitTrapUnless(Node* node, TrapId trap_id);
  void VisitTailCall(Node* call);
  void VisitGoto(BasicBlock* target);
  void VisitBranch(Node* input, BasicBlock* tbranch, BasicBlock* fbranch);
  void VisitSwitch(Node* node, const SwitchInfo& sw);
  void VisitDeoptimize(DeoptimizeKind kind, DeoptimizeReason reason,
                       VectorSlotPair const& feedback, Node* value);
  void VisitReturn(Node* ret);
  void VisitThrow(Node* node);
  void VisitRetain(Node* node);
  void VisitUnreachable(Node* node);
  void VisitDeadValue(Node* node);

  void VisitWordCompareZero(Node* user, Node* value, FlagsContinuation* cont);

  void EmitWordPoisonOnSpeculation(Node* node);

  void EmitPrepareArguments(ZoneVector<compiler::PushParameter>* arguments,
                            const CallDescriptor* call_descriptor, Node* node);
  void EmitPrepareResults(ZoneVector<compiler::PushParameter>* results,
                          const CallDescriptor* call_descriptor, Node* node);

  void EmitIdentity(Node* node);
  bool CanProduceSignalingNaN(Node* node);
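  // Helpers for matching 8x16 byte shuffles against simpler patterns
  // (canonical form, identity, dup, 32x4/16x8 shuffles, concat, blend).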
  static void CanonicalizeShuffle(bool inputs_equal, uint8_t* shuffle,
                                  bool* needs_swap, bool* is_swizzle);
  void CanonicalizeShuffle(Node* node, uint8_t* shuffle, bool* is_swizzle);
  void SwapShuffleInputs(Node* node);
  static bool TryMatchIdentity(const uint8_t* shuffle);
  // Tries to match an 8x16 byte shuffle to a duplicate (splat) of a single
  // lane. If successful, writes the lane index.
  template <int LANES>
  static bool TryMatchDup(const uint8_t* shuffle, int* index) {
    const int kBytesPerLane = kSimd128Size / LANES;
    // Get the first lane's worth of bytes and check that indices start at a
    // lane boundary and are consecutive.
    uint8_t lane0[kBytesPerLane];
    lane0[0] = shuffle[0];
    if (lane0[0] % kBytesPerLane != 0) return false;
    for (int i = 1; i < kBytesPerLane; ++i) {
      lane0[i] = shuffle[i];
      if (lane0[i] != lane0[0] + i) return false;
    }
    // Now check that the other lanes are identical to lane0.
    for (int i = 1; i < LANES; ++i) {
      for (int j = 0; j < kBytesPerLane; ++j) {
        if (lane0[j] != shuffle[i * kBytesPerLane + j]) return false;
      }
    }
    *index = lane0[0] / kBytesPerLane;
    return true;
  }
  static bool TryMatch32x4Shuffle(const uint8_t* shuffle, uint8_t* shuffle32x4);
  static bool TryMatch16x8Shuffle(const uint8_t* shuffle, uint8_t* shuffle16x8);
  static bool TryMatchConcat(const uint8_t* shuffle, uint8_t* offset);
  static bool TryMatchBlend(const uint8_t* shuffle);
  static int32_t Pack4Lanes(const uint8_t* shuffle);
  Schedule* schedule() const { return schedule_; }
  Linkage* linkage() const { return linkage_; }
  InstructionSequence* sequence() const { return sequence_; }
  Zone* instruction_zone() const { return sequence()->zone(); }
  Zone* zone() const { return zone_; }
  void set_instruction_selection_failed() {
    instruction_selection_failed_ = true;
  }
  bool instruction_selection_failed() { return instruction_selection_failed_; }

  void MarkPairProjectionsAsWord32(Node* node);
  bool IsSourcePositionUsed(Node* node);
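  // Shared helpers for visiting atomic binary operations: callers pass the
  // architecture opcode to use for each supported element width.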
  void VisitWord32AtomicBinaryOperation(Node* node, ArchOpcode int8_op,
                                        ArchOpcode uint8_op,
                                        ArchOpcode int16_op,
                                        ArchOpcode uint16_op,
                                        ArchOpcode word32_op);
  void VisitWord64AtomicBinaryOperation(Node* node, ArchOpcode uint8_op,
                                        ArchOpcode uint16_op,
                                        ArchOpcode uint32_op,
                                        ArchOpcode uint64_op);
  void VisitWord64AtomicNarrowBinop(Node* node, ArchOpcode uint8_op,
                                    ArchOpcode uint16_op, ArchOpcode uint32_op);
  Zone* const zone_;
  Linkage* const linkage_;
  InstructionSequence* const sequence_;
  SourcePositionTable* const source_positions_;
  SourcePositionMode const source_position_mode_;
  Features features_;
  Schedule* const schedule_;
  BasicBlock* current_block_;
  ZoneVector<Instruction*> instructions_;
  InstructionOperandVector continuation_inputs_;
  InstructionOperandVector continuation_outputs_;
  IntVector effect_level_;
  IntVector virtual_registers_;
  IntVector virtual_register_rename_;
  InstructionScheduler* scheduler_;
  EnableScheduling enable_scheduling_;
  EnableRootsRelativeAddressing enable_roots_relative_addressing_;
  EnableSwitchJumpTable enable_switch_jump_table_;
  PoisoningMitigationLevel poisoning_level_;
  bool instruction_selection_failed_;
  ZoneVector<std::pair<int, int>> instr_origins_;
  EnableTraceTurboJson trace_turbo_;
#endif  // V8_COMPILER_BACKEND_INSTRUCTION_SELECTOR_H_