#include "src/base/adapters.h"
#include "src/compiler/backend/instruction-selector-impl.h"
#include "src/compiler/node-matchers.h"
#include "src/compiler/node-properties.h"
#include "src/frame-constants.h"

enum class OperandMode : uint32_t {
  kNone = 0u,
  // Immediate mode
  kShift32Imm = 1u << 0,
  kShift64Imm = 1u << 1,
  kInt32Imm = 1u << 2,
  kInt32Imm_Negate = 1u << 3,
  kUint32Imm = 1u << 4,
  kInt20Imm = 1u << 5,
  kUint12Imm = 1u << 6,
  // Instr format
  kAllowRRR = 1u << 7,
  kAllowRM = 1u << 8,
  kAllowRI = 1u << 9,
  kAllowRRI = 1u << 10,
  kAllowRRM = 1u << 11,
  // Useful combinations
  kAllowImmediate = kAllowRI | kAllowRRI,
  kAllowMemoryOperand = kAllowRM | kAllowRRM,
  kAllowDistinctOps = kAllowRRR | kAllowRRI | kAllowRRM,
  kBitWiseCommonMode = kAllowRI,
  kArithmeticCommonMode = kAllowRM | kAllowRI
};

typedef base::Flags<OperandMode, uint32_t> OperandModes;
DEFINE_OPERATORS_FOR_FLAGS(OperandModes);

OperandModes immediateModeMask =
    OperandMode::kShift32Imm | OperandMode::kShift64Imm |
    OperandMode::kInt32Imm | OperandMode::kInt32Imm_Negate |
    OperandMode::kUint32Imm | OperandMode::kInt20Imm;

#define AndCommonMode                                                \
  ((OperandMode::kAllowRM |                                          \
    (CpuFeatures::IsSupported(DISTINCT_OPS) ? OperandMode::kAllowRRR \
                                            : OperandMode::kNone)))
#define And64OperandMode AndCommonMode
#define Or64OperandMode And64OperandMode
#define Xor64OperandMode And64OperandMode

#define And32OperandMode \
  (AndCommonMode | OperandMode::kAllowRI | OperandMode::kUint32Imm)
#define Or32OperandMode And32OperandMode
#define Xor32OperandMode And32OperandMode

#define Shift32OperandMode                                   \
  ((OperandMode::kAllowRI | OperandMode::kShift64Imm |       \
    (CpuFeatures::IsSupported(DISTINCT_OPS)                  \
         ? (OperandMode::kAllowRRR | OperandMode::kAllowRRI) \
         : OperandMode::kNone)))

#define Shift64OperandMode                             \
  ((OperandMode::kAllowRI | OperandMode::kShift64Imm | \
    OperandMode::kAllowRRR | OperandMode::kAllowRRI))

#define AddOperandMode                                            \
  ((OperandMode::kArithmeticCommonMode | OperandMode::kInt32Imm | \
    (CpuFeatures::IsSupported(DISTINCT_OPS)                       \
         ? (OperandMode::kAllowRRR | OperandMode::kAllowRRI)      \
         : OperandMode::kArithmeticCommonMode)))
#define SubOperandMode                                                   \
  ((OperandMode::kArithmeticCommonMode | OperandMode::kInt32Imm_Negate | \
    (CpuFeatures::IsSupported(DISTINCT_OPS)                              \
         ? (OperandMode::kAllowRRR | OperandMode::kAllowRRI)             \
         : OperandMode::kArithmeticCommonMode)))
#define MulOperandMode \
  (OperandMode::kArithmeticCommonMode | OperandMode::kInt32Imm)

class S390OperandGenerator final : public OperandGenerator {
 public:
  explicit S390OperandGenerator(InstructionSelector* selector)
      : OperandGenerator(selector) {}

  InstructionOperand UseOperand(Node* node, OperandModes mode) {
    if (CanBeImmediate(node, mode)) {
      return UseImmediate(node);
    }
    return UseRegister(node);
  }

  InstructionOperand UseAnyExceptImmediate(Node* node) {
    if (NodeProperties::IsConstant(node))
      return UseRegister(node);
    else
      return Use(node);
  }

  int64_t GetImmediate(Node* node) {
    if (node->opcode() == IrOpcode::kInt32Constant)
      return OpParameter<int32_t>(node->op());
    else if (node->opcode() == IrOpcode::kInt64Constant)
      return OpParameter<int64_t>(node->op());
    else
      UNIMPLEMENTED();
    return 0L;
  }

  bool CanBeImmediate(Node* node, OperandModes mode) {
    int64_t value;
    if (node->opcode() == IrOpcode::kInt32Constant)
      value = OpParameter<int32_t>(node->op());
    else if (node->opcode() == IrOpcode::kInt64Constant)
      value = OpParameter<int64_t>(node->op());
    else
      return false;
    return CanBeImmediate(value, mode);
  }

  bool CanBeImmediate(int64_t value, OperandModes mode) {
    if (mode & OperandMode::kShift32Imm)
      return 0 <= value && value < 32;
    else if (mode & OperandMode::kShift64Imm)
      return 0 <= value && value < 64;
    else if (mode & OperandMode::kInt32Imm)
      return is_int32(value);
    else if (mode & OperandMode::kInt32Imm_Negate)
      return is_int32(-value);
    else if (mode & OperandMode::kUint32Imm)
      return is_uint32(value);
    else if (mode & OperandMode::kInt20Imm)
      return is_int20(value);
    else if (mode & OperandMode::kUint12Imm)
      return is_uint12(value);
    else
      return false;
  }

  bool CanBeMemoryOperand(InstructionCode opcode, Node* user, Node* input,
                          int effect_level) {
    if (input->opcode() != IrOpcode::kLoad ||
        !selector()->CanCover(user, input)) {
      return false;
    }

    if (effect_level != selector()->GetEffectLevel(input)) {
      return false;
    }

    MachineRepresentation rep =
        LoadRepresentationOf(input->op()).representation();
    switch (opcode) {
      case kS390_Cmp64:
      case kS390_LoadAndTestWord64:
        return rep == MachineRepresentation::kWord64 || IsAnyTagged(rep);
      case kS390_LoadAndTestWord32:
      case kS390_Cmp32:
        return rep == MachineRepresentation::kWord32;
      default:
        break;
    }
    return false;
  }

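  // Builds the instruction inputs for a base + index + displacement memory
  // reference, pushing operands into `inputs` and bumping *input_count, and
  // returns the AddressingMode matching the operand combination it emitted.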
  AddressingMode GenerateMemoryOperandInputs(Node* index, Node* base,
                                             Node* displacement,
                                             DisplacementMode displacement_mode,
                                             InstructionOperand inputs[],
                                             size_t* input_count) {
    AddressingMode mode = kMode_MRI;
    if (base != nullptr) {
      inputs[(*input_count)++] = UseRegister(base);
      if (index != nullptr) {
        inputs[(*input_count)++] = UseRegister(index);
        if (displacement != nullptr) {
          inputs[(*input_count)++] = displacement_mode
                                         ? UseNegatedImmediate(displacement)
                                         : UseImmediate(displacement);
          mode = kMode_MRRI;
        } else {
          mode = kMode_MRR;
        }
      } else {
        if (displacement == nullptr) {
          mode = kMode_MR;
        } else {
          inputs[(*input_count)++] = displacement_mode == kNegativeDisplacement
                                         ? UseNegatedImmediate(displacement)
                                         : UseImmediate(displacement);
          mode = kMode_MRI;
        }
      }
    } else {
      DCHECK_NOT_NULL(index);
      inputs[(*input_count)++] = UseRegister(index);
      if (displacement != nullptr) {
        inputs[(*input_count)++] = displacement_mode == kNegativeDisplacement
                                       ? UseNegatedImmediate(displacement)
                                       : UseImmediate(displacement);
        mode = kMode_MRI;
      } else {
        mode = kMode_MR;
      }
    }
    return mode;
  }

  AddressingMode GetEffectiveAddressMemoryOperand(
      Node* operand, InstructionOperand inputs[], size_t* input_count,
      OperandModes immediate_mode = OperandMode::kInt20Imm) {
#if V8_TARGET_ARCH_S390X
    BaseWithIndexAndDisplacement64Matcher m(operand,
                                            AddressOption::kAllowInputSwap);
#else
    BaseWithIndexAndDisplacement32Matcher m(operand,
                                            AddressOption::kAllowInputSwap);
#endif
    DCHECK(m.matches());
    if ((m.displacement() == nullptr ||
         CanBeImmediate(m.displacement(), immediate_mode))) {
      DCHECK_EQ(0, m.scale());
      return GenerateMemoryOperandInputs(m.index(), m.base(), m.displacement(),
                                         m.displacement_mode(), inputs,
                                         input_count);
    } else {
      inputs[(*input_count)++] = UseRegister(operand->InputAt(0));
      inputs[(*input_count)++] = UseRegister(operand->InputAt(1));
      return kMode_MRR;
    }
  }

  bool CanBeBetterLeftOperand(Node* node) const {
    return !selector()->IsLive(node);
  }

  MachineRepresentation GetRepresentation(Node* node) {
    return sequence()->GetRepresentation(selector()->GetVirtualRegister(node));
  }

  bool Is64BitOperand(Node* node) {
    return MachineRepresentation::kWord64 == GetRepresentation(node);
  }

  InstructionOperand UseRegisterOrStackPointer(Node* node) {
    if (node->opcode() == IrOpcode::kLoadStackPointer) {
      return LocationOperand(LocationOperand::EXPLICIT,
                             LocationOperand::REGISTER,
                             MachineRepresentation::kWord32, sp.code());
    }
    return UseRegister(node);
  }
};

bool S390OpcodeOnlySupport12BitDisp(ArchOpcode opcode) {
  switch (opcode) {
    case kS390_AddFloat:
    case kS390_AddDouble:
    case kS390_CmpFloat:
    case kS390_CmpDouble:
    case kS390_Float32ToDouble:
      return true;
    default:
      return false;
  }
}

bool S390OpcodeOnlySupport12BitDisp(InstructionCode op) {
  ArchOpcode opcode = ArchOpcodeField::decode(op);
  return S390OpcodeOnlySupport12BitDisp(opcode);
}

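// Some S390 instructions only accept a 12-bit unsigned displacement, so
// OpcodeImmMode picks kUint12Imm for those opcodes and the 20-bit signed
// displacement mode otherwise. SelectLoadOpcode maps a load's
// MachineRepresentation onto the corresponding S390 load opcode.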
#define OpcodeImmMode(op)                                       \
  (S390OpcodeOnlySupport12BitDisp(op) ? OperandMode::kUint12Imm \
                                      : OperandMode::kInt20Imm)

ArchOpcode SelectLoadOpcode(Node* node) {
  LoadRepresentation load_rep = LoadRepresentationOf(node->op());
  ArchOpcode opcode = kArchNop;
  switch (load_rep.representation()) {
    case MachineRepresentation::kFloat32:
      opcode = kS390_LoadFloat32;
      break;
    case MachineRepresentation::kFloat64:
      opcode = kS390_LoadDouble;
      break;
    case MachineRepresentation::kBit:  // Fall through.
    case MachineRepresentation::kWord8:
      opcode = load_rep.IsSigned() ? kS390_LoadWordS8 : kS390_LoadWordU8;
      break;
    case MachineRepresentation::kWord16:
      opcode = load_rep.IsSigned() ? kS390_LoadWordS16 : kS390_LoadWordU16;
      break;
#if !V8_TARGET_ARCH_S390X
    case MachineRepresentation::kTaggedSigned:   // Fall through.
    case MachineRepresentation::kTaggedPointer:  // Fall through.
    case MachineRepresentation::kTagged:         // Fall through.
#endif
    case MachineRepresentation::kWord32:
      opcode = kS390_LoadWordU32;
      break;
#if V8_TARGET_ARCH_S390X
    case MachineRepresentation::kTaggedSigned:   // Fall through.
    case MachineRepresentation::kTaggedPointer:  // Fall through.
    case MachineRepresentation::kTagged:         // Fall through.
    case MachineRepresentation::kWord64:
      opcode = kS390_LoadWord64;
      break;
#else
    case MachineRepresentation::kWord64:  // Fall through.
#endif
    case MachineRepresentation::kSimd128:  // Fall through.
    case MachineRepresentation::kNone:
    default:
      UNREACHABLE();
  }
  return opcode;
}

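// On 64-bit S390 a 32-bit operation can leave stale bits in the upper word.
// RESULT_IS_WORD32_LIST / ProduceWord32Result track which nodes already yield
// a clean 32-bit result, and DoZeroExtForResult decides whether the result of
// a node still needs an explicit zero-extension.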
#define RESULT_IS_WORD32_LIST(V)  \
  V(BitcastFloat32ToInt32)        \
  V(RoundFloat64ToInt32)          \
  V(TruncateFloat32ToInt32)       \
  V(TruncateFloat32ToUint32)      \
  V(TruncateFloat64ToUint32)      \
  V(ChangeFloat64ToInt32)         \
  V(ChangeFloat64ToUint32)        \
  V(Float64ExtractLowWord32)      \
  V(Float64ExtractHighWord32)     \
  V(SignExtendWord8ToInt32)       \
  V(SignExtendWord16ToInt32)      \
  V(Int32AddWithOverflow)         \
  V(Int32SubWithOverflow)         \
  V(Int32MulWithOverflow)

bool ProduceWord32Result(Node* node) {
#if !V8_TARGET_ARCH_S390X
  return true;
#else
  switch (node->opcode()) {
#define VISITOR(name) case IrOpcode::k##name:
    RESULT_IS_WORD32_LIST(VISITOR)
#undef VISITOR
    return true;
    case IrOpcode::kLoad: {
      LoadRepresentation load_rep = LoadRepresentationOf(node->op());
      switch (load_rep.representation()) {
        case MachineRepresentation::kWord32:
          return true;
        case MachineRepresentation::kWord8:
          if (load_rep.IsSigned())
            return false;
          else
            return true;
        default:
          return false;
      }
    }
    default:
      return false;
  }
#endif
}

static inline bool DoZeroExtForResult(Node* node) {
#if V8_TARGET_ARCH_S390X
  return ProduceWord32Result(node);
#else
  return false;
#endif
}

void VisitShift() {}

#if V8_TARGET_ARCH_S390X
void VisitTryTruncateDouble(InstructionSelector* selector, ArchOpcode opcode,
                            Node* node) {
  S390OperandGenerator g(selector);
  InstructionOperand inputs[] = {g.UseRegister(node->InputAt(0))};
  InstructionOperand outputs[2];
  size_t output_count = 0;
  outputs[output_count++] = g.DefineAsRegister(node);

  Node* success_output = NodeProperties::FindProjection(node, 1);
  if (success_output) {
    outputs[output_count++] = g.DefineAsRegister(success_output);
  }

  selector->Emit(opcode, output_count, outputs, 1, inputs);
}
#endif

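// Materializes the right-hand operand of an instruction: as an immediate, as
// a memory operand folded from a covered load, or as a register. The
// operand_mode flags are narrowed along the way so callers know which
// instruction formats remain legal.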
template <class CanCombineWithLoad>
void GenerateRightOperands(InstructionSelector* selector, Node* node,
                           Node* right, InstructionCode& opcode,
                           OperandModes& operand_mode,
                           InstructionOperand* inputs, size_t& input_count,
                           CanCombineWithLoad canCombineWithLoad) {
  S390OperandGenerator g(selector);

  if ((operand_mode & OperandMode::kAllowImmediate) &&
      g.CanBeImmediate(right, operand_mode)) {
    inputs[input_count++] = g.UseImmediate(right);
    // Can only be RI or RR
    operand_mode &= OperandMode::kAllowImmediate;
  } else if (operand_mode & OperandMode::kAllowMemoryOperand) {
    NodeMatcher mright(right);
    if (mright.IsLoad() && selector->CanCover(node, right) &&
        canCombineWithLoad(SelectLoadOpcode(right))) {
      AddressingMode mode = g.GetEffectiveAddressMemoryOperand(
          right, inputs, &input_count, OpcodeImmMode(opcode));
      opcode |= AddressingModeField::encode(mode);
      operand_mode &= ~OperandMode::kAllowImmediate;
      if (operand_mode & OperandMode::kAllowRM)
        operand_mode &= ~OperandMode::kAllowDistinctOps;
    } else if (operand_mode & OperandMode::kAllowRM) {
      DCHECK(!(operand_mode & OperandMode::kAllowRRM));
      inputs[input_count++] = g.UseAnyExceptImmediate(right);
      // Can only be RM or RR
      operand_mode &=
          ~OperandMode::kAllowImmediate & ~OperandMode::kAllowDistinctOps;
    } else if (operand_mode & OperandMode::kAllowRRM) {
      DCHECK(!(operand_mode & OperandMode::kAllowRM));
      inputs[input_count++] = g.UseAnyExceptImmediate(right);
      // Can only be RRM or RRR
      operand_mode &= ~OperandMode::kAllowImmediate;
    } else {
      UNREACHABLE();
    }
  } else {
    inputs[input_count++] = g.UseRegister(right);
    // Can only be RR or RRR
    operand_mode &= OperandMode::kAllowRRR;
  }
}

template <class CanCombineWithLoad>
void GenerateBinOpOperands(InstructionSelector* selector, Node* node,
                           Node* left, Node* right, InstructionCode& opcode,
                           OperandModes& operand_mode,
                           InstructionOperand* inputs, size_t& input_count,
                           CanCombineWithLoad canCombineWithLoad) {
  S390OperandGenerator g(selector);
  // left is always a register
  InstructionOperand const left_input = g.UseRegister(left);
  inputs[input_count++] = left_input;

  if (left == right) {
    inputs[input_count++] = left_input;
    // Can only be RR or RRR
    operand_mode &= OperandMode::kAllowRRR;
  } else {
    GenerateRightOperands(selector, node, right, opcode, operand_mode, inputs,
                          input_count, canCombineWithLoad);
  }
}

template <class CanCombineWithLoad>
void VisitUnaryOp(InstructionSelector* selector, Node* node,
                  InstructionCode opcode, OperandModes operand_mode,
                  FlagsContinuation* cont,
                  CanCombineWithLoad canCombineWithLoad);

template <class CanCombineWithLoad>
void VisitBinOp(InstructionSelector* selector, Node* node,
                InstructionCode opcode, OperandModes operand_mode,
                FlagsContinuation* cont, CanCombineWithLoad canCombineWithLoad);

#define VISIT_OP_LIST_32(V)                                            \
  V(Word32, Unary, [](ArchOpcode opcode) {                             \
    return opcode == kS390_LoadWordS32 || opcode == kS390_LoadWordU32; \
  })                                                                   \
  V(Word64, Unary,                                                     \
    [](ArchOpcode opcode) { return opcode == kS390_LoadWord64; })      \
  V(Float32, Unary,                                                    \
    [](ArchOpcode opcode) { return opcode == kS390_LoadFloat32; })     \
  V(Float64, Unary,                                                    \
    [](ArchOpcode opcode) { return opcode == kS390_LoadDouble; })      \
  V(Word32, Bin, [](ArchOpcode opcode) {                               \
    return opcode == kS390_LoadWordS32 || opcode == kS390_LoadWordU32; \
  })                                                                   \
  V(Float32, Bin,                                                      \
    [](ArchOpcode opcode) { return opcode == kS390_LoadFloat32; })     \
  V(Float64, Bin, [](ArchOpcode opcode) { return opcode == kS390_LoadDouble; })

#if V8_TARGET_ARCH_S390X
#define VISIT_OP_LIST(V) \
  VISIT_OP_LIST_32(V)    \
  V(Word64, Bin, [](ArchOpcode opcode) { return opcode == kS390_LoadWord64; })
#else
#define VISIT_OP_LIST VISIT_OP_LIST_32
#endif

#define DECLARE_VISIT_HELPER_FUNCTIONS(type1, type2, canCombineWithLoad)  \
  static inline void Visit##type1##type2##Op(                            \
      InstructionSelector* selector, Node* node, InstructionCode opcode,  \
      OperandModes operand_mode, FlagsContinuation* cont) {               \
    Visit##type2##Op(selector, node, opcode, operand_mode, cont,          \
                     canCombineWithLoad);                                 \
  }                                                                       \
  static inline void Visit##type1##type2##Op(                            \
      InstructionSelector* selector, Node* node, InstructionCode opcode,  \
      OperandModes operand_mode) {                                        \
    FlagsContinuation cont;                                               \
    Visit##type1##type2##Op(selector, node, opcode, operand_mode, &cont); \
  }
VISIT_OP_LIST(DECLARE_VISIT_HELPER_FUNCTIONS);
#undef DECLARE_VISIT_HELPER_FUNCTIONS
#undef VISIT_OP_LIST_32

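// Shared lowering for unary operations. When DoZeroExtForResult requests a
// zero-extended result, an extra TempImmediate input tells the code generator
// whether that extension can be elided because the input is already a clean
// 32-bit value.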
template <class CanCombineWithLoad>
void VisitUnaryOp(InstructionSelector* selector, Node* node,
                  InstructionCode opcode, OperandModes operand_mode,
                  FlagsContinuation* cont,
                  CanCombineWithLoad canCombineWithLoad) {
  S390OperandGenerator g(selector);
  InstructionOperand inputs[8];
  size_t input_count = 0;
  InstructionOperand outputs[2];
  size_t output_count = 0;
  Node* input = node->InputAt(0);

  GenerateRightOperands(selector, node, input, opcode, operand_mode, inputs,
                        input_count, canCombineWithLoad);

  bool input_is_word32 = ProduceWord32Result(input);

  bool doZeroExt = DoZeroExtForResult(node);
  bool canEliminateZeroExt = input_is_word32;

  if (doZeroExt) {
    // Add zero-ext indication
    inputs[input_count++] = g.TempImmediate(!canEliminateZeroExt);
  }

  if (!cont->IsDeoptimize()) {
    if (doZeroExt && canEliminateZeroExt) {
      // We have to make sure the result and the input use the same register.
      outputs[output_count++] = g.DefineSameAsFirst(node);
    } else {
      outputs[output_count++] = g.DefineAsRegister(node);
    }
  } else {
    outputs[output_count++] = g.DefineSameAsFirst(node);
  }

  DCHECK_NE(0u, input_count);
  DCHECK_NE(0u, output_count);
  DCHECK_GE(arraysize(inputs), input_count);
  DCHECK_GE(arraysize(outputs), output_count);

  selector->EmitWithContinuation(opcode, output_count, outputs, input_count,
                                 inputs, cont);
}

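// Shared lowering for binary operations. Commutative operations may swap
// their operands so that an immediate or a shorter-lived value ends up on the
// right, where more operand forms are available.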
template <class CanCombineWithLoad>
void VisitBinOp(InstructionSelector* selector, Node* node,
                InstructionCode opcode, OperandModes operand_mode,
                FlagsContinuation* cont,
                CanCombineWithLoad canCombineWithLoad) {
  S390OperandGenerator g(selector);
  Int32BinopMatcher m(node);
  Node* left = m.left().node();
  Node* right = m.right().node();
  InstructionOperand inputs[8];
  size_t input_count = 0;
  InstructionOperand outputs[2];
  size_t output_count = 0;

  if (node->op()->HasProperty(Operator::kCommutative) &&
      !g.CanBeImmediate(right, operand_mode) &&
      (g.CanBeBetterLeftOperand(right))) {
    std::swap(left, right);
  }

  GenerateBinOpOperands(selector, node, left, right, opcode, operand_mode,
                        inputs, input_count, canCombineWithLoad);

  bool left_is_word32 = ProduceWord32Result(left);

  bool doZeroExt = DoZeroExtForResult(node);
  bool canEliminateZeroExt = left_is_word32;

  if (doZeroExt) {
    // Add zero-ext indication
    inputs[input_count++] = g.TempImmediate(!canEliminateZeroExt);
  }

  if ((operand_mode & OperandMode::kAllowDistinctOps) &&
      // If we can deoptimize as a result of the binop, we need to make sure
      // that the deopt inputs are not overwritten by the binop result. One way
      // to achieve that is to declare the output register as same-as-first.
      !cont->IsDeoptimize()) {
    if (doZeroExt && canEliminateZeroExt) {
      // We have to make sure the result and the left input use the same
      // register.
      outputs[output_count++] = g.DefineSameAsFirst(node);
    } else {
      outputs[output_count++] = g.DefineAsRegister(node);
    }
  } else {
    outputs[output_count++] = g.DefineSameAsFirst(node);
  }

  DCHECK_NE(0u, input_count);
  DCHECK_NE(0u, output_count);
  DCHECK_GE(arraysize(inputs), input_count);
  DCHECK_GE(arraysize(outputs), output_count);

  selector->EmitWithContinuation(opcode, output_count, outputs, input_count,
                                 inputs, cont);
}

void InstructionSelector::VisitStackSlot(Node* node) {
  StackSlotRepresentation rep = StackSlotRepresentationOf(node->op());
  int slot = frame_->AllocateSpillSlot(rep.size());
  OperandGenerator g(this);

  Emit(kArchStackSlot, g.DefineAsRegister(node),
       sequence()->AddImmediate(Constant(slot)), 0, nullptr);
}

void InstructionSelector::VisitDebugAbort(Node* node) {
  S390OperandGenerator g(this);
  Emit(kArchDebugAbort, g.NoOutput(), g.UseFixed(node->InputAt(0), r3));
}

void InstructionSelector::VisitLoad(Node* node) {
  S390OperandGenerator g(this);
  InstructionCode opcode = SelectLoadOpcode(node);
  InstructionOperand outputs[1];
  outputs[0] = g.DefineAsRegister(node);
  InstructionOperand inputs[3];
  size_t input_count = 0;
  AddressingMode mode =
      g.GetEffectiveAddressMemoryOperand(node, inputs, &input_count);
  opcode |= AddressingModeField::encode(mode);
  if (node->opcode() == IrOpcode::kPoisonedLoad) {
    CHECK_NE(poisoning_level_, PoisoningMitigationLevel::kDontPoison);
    opcode |= MiscField::encode(kMemoryAccessPoisoned);
  }

  Emit(opcode, 1, outputs, input_count, inputs);
}

void InstructionSelector::VisitPoisonedLoad(Node* node) { VisitLoad(node); }

void InstructionSelector::VisitProtectedLoad(Node* node) {
  UNIMPLEMENTED();
}

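// Stores that need a write barrier become kArchStoreWithWriteBarrier with
// unique registers for base, offset and value; plain stores pick a
// representation-specific opcode and can fold byte-reversal of the stored
// value into the store-reverse instructions.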
static void VisitGeneralStore(
    InstructionSelector* selector, Node* node, MachineRepresentation rep,
    WriteBarrierKind write_barrier_kind = kNoWriteBarrier) {
  S390OperandGenerator g(selector);
  Node* base = node->InputAt(0);
  Node* offset = node->InputAt(1);
  Node* value = node->InputAt(2);
  if (write_barrier_kind != kNoWriteBarrier) {
    DCHECK(CanBeTaggedPointer(rep));
    AddressingMode addressing_mode;
    InstructionOperand inputs[3];
    size_t input_count = 0;
    inputs[input_count++] = g.UseUniqueRegister(base);
    if (g.CanBeImmediate(offset, OperandMode::kInt20Imm)) {
      inputs[input_count++] = g.UseImmediate(offset);
      addressing_mode = kMode_MRI;
    } else {
      inputs[input_count++] = g.UseUniqueRegister(offset);
      addressing_mode = kMode_MRR;
    }
    inputs[input_count++] = g.UseUniqueRegister(value);
    RecordWriteMode record_write_mode = RecordWriteMode::kValueIsAny;
    switch (write_barrier_kind) {
      case kNoWriteBarrier:
        UNREACHABLE();
        break;
      case kMapWriteBarrier:
        record_write_mode = RecordWriteMode::kValueIsMap;
        break;
      case kPointerWriteBarrier:
        record_write_mode = RecordWriteMode::kValueIsPointer;
        break;
      case kFullWriteBarrier:
        record_write_mode = RecordWriteMode::kValueIsAny;
        break;
    }
    InstructionOperand temps[] = {g.TempRegister(), g.TempRegister()};
    size_t const temp_count = arraysize(temps);
    InstructionCode code = kArchStoreWithWriteBarrier;
    code |= AddressingModeField::encode(addressing_mode);
    code |= MiscField::encode(static_cast<int>(record_write_mode));
    selector->Emit(code, 0, nullptr, input_count, inputs, temp_count, temps);
  } else {
    ArchOpcode opcode = kArchNop;
    NodeMatcher m(value);
    switch (rep) {
      case MachineRepresentation::kFloat32:
        opcode = kS390_StoreFloat32;
        break;
      case MachineRepresentation::kFloat64:
        opcode = kS390_StoreDouble;
        break;
      case MachineRepresentation::kBit:  // Fall through.
      case MachineRepresentation::kWord8:
        opcode = kS390_StoreWord8;
        break;
      case MachineRepresentation::kWord16:
        opcode = kS390_StoreWord16;
        break;
#if !V8_TARGET_ARCH_S390X
      case MachineRepresentation::kTaggedSigned:   // Fall through.
      case MachineRepresentation::kTaggedPointer:  // Fall through.
      case MachineRepresentation::kTagged:         // Fall through.
#endif
      case MachineRepresentation::kWord32:
        opcode = kS390_StoreWord32;
        if (m.IsWord32ReverseBytes()) {
          opcode = kS390_StoreReverse32;
          value = value->InputAt(0);
        }
        break;
#if V8_TARGET_ARCH_S390X
      case MachineRepresentation::kTaggedSigned:   // Fall through.
      case MachineRepresentation::kTaggedPointer:  // Fall through.
      case MachineRepresentation::kTagged:         // Fall through.
      case MachineRepresentation::kWord64:
        opcode = kS390_StoreWord64;
        if (m.IsWord64ReverseBytes()) {
          opcode = kS390_StoreReverse64;
          value = value->InputAt(0);
        }
        break;
#else
      case MachineRepresentation::kWord64:  // Fall through.
#endif
      case MachineRepresentation::kSimd128:  // Fall through.
      case MachineRepresentation::kNone:
        UNREACHABLE();
        return;
    }
    InstructionOperand inputs[4];
    size_t input_count = 0;
    AddressingMode addressing_mode =
        g.GetEffectiveAddressMemoryOperand(node, inputs, &input_count);
    InstructionCode code =
        opcode | AddressingModeField::encode(addressing_mode);
    InstructionOperand value_operand = g.UseRegister(value);
    inputs[input_count++] = value_operand;
    selector->Emit(code, 0, static_cast<InstructionOperand*>(nullptr),
                   input_count, inputs);
  }
}

void InstructionSelector::VisitStore(Node* node) {
  StoreRepresentation store_rep = StoreRepresentationOf(node->op());
  WriteBarrierKind write_barrier_kind = store_rep.write_barrier_kind();
  MachineRepresentation rep = store_rep.representation();

  VisitGeneralStore(this, node, rep, write_barrier_kind);
}

void InstructionSelector::VisitProtectedStore(Node* node) {
  UNIMPLEMENTED();
}

// Architecture supports unaligned access, so VisitLoad/VisitStore are used.
void InstructionSelector::VisitUnalignedLoad(Node* node) { UNREACHABLE(); }

void InstructionSelector::VisitUnalignedStore(Node* node) { UNREACHABLE(); }

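// A "contiguous mask" is a single run of consecutive 1-bits, e.g. 0x00ff0000.
// The helpers below detect such masks and report the run's boundaries through
// *mb and *me, which feeds the rotate-and-clear selection that follows.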
static inline bool IsContiguousMask32(uint32_t value, int* mb, int* me) {
  int mask_width = base::bits::CountPopulation(value);
  int mask_msb = base::bits::CountLeadingZeros32(value);
  int mask_lsb = base::bits::CountTrailingZeros32(value);
  if ((mask_width == 0) || (mask_msb + mask_width + mask_lsb != 32))
    return false;
  *mb = mask_lsb + mask_width - 1;
  *me = mask_lsb;
  return true;
}

#if V8_TARGET_ARCH_S390X
static inline bool IsContiguousMask64(uint64_t value, int* mb, int* me) {
  int mask_width = base::bits::CountPopulation(value);
  int mask_msb = base::bits::CountLeadingZeros64(value);
  int mask_lsb = base::bits::CountTrailingZeros64(value);
  if ((mask_width == 0) || (mask_msb + mask_width + mask_lsb != 64))
    return false;
  *mb = mask_lsb + mask_width - 1;
  *me = mask_lsb;
  return true;
}
#endif

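// Word64 And/Shl/Shr with a contiguous mask can be folded into a single
// rotate-left-and-clear instruction (kS390_RotLeftAndClearLeft64 /
// ...Right64 / ...64) when the shift amount and mask boundaries line up;
// otherwise the generic binop path is used.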
#if V8_TARGET_ARCH_S390X
void InstructionSelector::VisitWord64And(Node* node) {
  S390OperandGenerator g(this);
  Int64BinopMatcher m(node);
  int mb = 0;
  int me = 0;
  if (m.right().HasValue() && IsContiguousMask64(m.right().Value(), &mb, &me)) {
    int sh = 0;
    Node* left = m.left().node();
    if ((m.left().IsWord64Shr() || m.left().IsWord64Shl()) &&
        CanCover(node, left)) {
      Int64BinopMatcher mleft(m.left().node());
      if (mleft.right().IsInRange(0, 63)) {
        left = mleft.left().node();
        sh = mleft.right().Value();
        if (m.left().IsWord64Shr()) {
          // Adjust the mask such that it doesn't include any rotated bits.
          if (mb > 63 - sh) mb = 63 - sh;
          sh = (64 - sh) & 0x3F;
        } else {
          // Adjust the mask such that it doesn't include any rotated bits.
          if (me < sh) me = sh;
        }
      }
    }
    if (mb >= me) {
      bool match = false;
      ArchOpcode opcode;
      int mask;
      if (me == 0) {
        match = true;
        opcode = kS390_RotLeftAndClearLeft64;
        mask = mb;
      } else if (mb == 63) {
        match = true;
        opcode = kS390_RotLeftAndClearRight64;
        mask = me;
      } else if (sh && me <= sh && m.left().IsWord64Shl()) {
        match = true;
        opcode = kS390_RotLeftAndClear64;
        mask = mb;
      }
      if (match && CpuFeatures::IsSupported(GENERAL_INSTR_EXT)) {
        Emit(opcode, g.DefineAsRegister(node), g.UseRegister(left),
             g.TempImmediate(sh), g.TempImmediate(mask));
        return;
      }
    }
  }
  VisitWord64BinOp(this, node, kS390_And64, And64OperandMode);
}

void InstructionSelector::VisitWord64Shl(Node* node) {
  S390OperandGenerator g(this);
  Int64BinopMatcher m(node);
  if (m.left().IsWord64And() && m.right().IsInRange(0, 63)) {
    Int64BinopMatcher mleft(m.left().node());
    int sh = m.right().Value();
    int mb;
    int me;
    if (mleft.right().HasValue() &&
        IsContiguousMask64(mleft.right().Value() << sh, &mb, &me)) {
      // Adjust the mask such that it doesn't include any rotated bits.
      if (me < sh) me = sh;
      if (mb >= me) {
        bool match = false;
        ArchOpcode opcode;
        int mask;
        if (me == 0) {
          match = true;
          opcode = kS390_RotLeftAndClearLeft64;
          mask = mb;
        } else if (mb == 63) {
          match = true;
          opcode = kS390_RotLeftAndClearRight64;
          mask = me;
        } else if (sh && me <= sh) {
          match = true;
          opcode = kS390_RotLeftAndClear64;
          mask = mb;
        }
        if (match && CpuFeatures::IsSupported(GENERAL_INSTR_EXT)) {
          Emit(opcode, g.DefineAsRegister(node),
               g.UseRegister(mleft.left().node()), g.TempImmediate(sh),
               g.TempImmediate(mask));
          return;
        }
      }
    }
  }
  VisitWord64BinOp(this, node, kS390_ShiftLeft64, Shift64OperandMode);
}

void InstructionSelector::VisitWord64Shr(Node* node) {
  S390OperandGenerator g(this);
  Int64BinopMatcher m(node);
  if (m.left().IsWord64And() && m.right().IsInRange(0, 63)) {
    Int64BinopMatcher mleft(m.left().node());
    int sh = m.right().Value();
    int mb;
    int me;
    if (mleft.right().HasValue() &&
        IsContiguousMask64((uint64_t)(mleft.right().Value()) >> sh, &mb, &me)) {
      // Adjust the mask such that it doesn't include any rotated bits.
      if (mb > 63 - sh) mb = 63 - sh;
      sh = (64 - sh) & 0x3F;
      if (mb >= me) {
        bool match = false;
        ArchOpcode opcode;
        int mask;
        if (me == 0) {
          match = true;
          opcode = kS390_RotLeftAndClearLeft64;
          mask = mb;
        } else if (mb == 63) {
          match = true;
          opcode = kS390_RotLeftAndClearRight64;
          mask = me;
        }
        if (match) {
          Emit(opcode, g.DefineAsRegister(node),
               g.UseRegister(mleft.left().node()), g.TempImmediate(sh),
               g.TempImmediate(mask));
          return;
        }
      }
    }
  }
  VisitWord64BinOp(this, node, kS390_ShiftRight64, Shift64OperandMode);
}

static inline bool TryMatchSignExtInt16OrInt8FromWord32Sar(
    InstructionSelector* selector, Node* node) {
  S390OperandGenerator g(selector);
  Int32BinopMatcher m(node);
  if (selector->CanCover(node, m.left().node()) && m.left().IsWord32Shl()) {
    Int32BinopMatcher mleft(m.left().node());
    if (mleft.right().Is(16) && m.right().Is(16)) {
      bool canEliminateZeroExt = ProduceWord32Result(mleft.left().node());
      selector->Emit(kS390_SignExtendWord16ToInt32,
                     canEliminateZeroExt ? g.DefineSameAsFirst(node)
                                         : g.DefineAsRegister(node),
                     g.UseRegister(mleft.left().node()),
                     g.TempImmediate(!canEliminateZeroExt));
      return true;
    } else if (mleft.right().Is(24) && m.right().Is(24)) {
      bool canEliminateZeroExt = ProduceWord32Result(mleft.left().node());
      selector->Emit(kS390_SignExtendWord8ToInt32,
                     canEliminateZeroExt ? g.DefineSameAsFirst(node)
                                         : g.DefineAsRegister(node),
                     g.UseRegister(mleft.left().node()),
                     g.TempImmediate(!canEliminateZeroExt));
      return true;
    }
  }
  return false;
}

#if !V8_TARGET_ARCH_S390X
void VisitPairBinop(InstructionSelector* selector, InstructionCode opcode,
                    InstructionCode opcode2, Node* node) {
  S390OperandGenerator g(selector);

  Node* projection1 = NodeProperties::FindProjection(node, 1);
  if (projection1) {
    // We use UseUniqueRegister here to avoid register sharing with the output
    // registers.
    InstructionOperand inputs[] = {
        g.UseRegister(node->InputAt(0)), g.UseUniqueRegister(node->InputAt(1)),
        g.UseRegister(node->InputAt(2)), g.UseUniqueRegister(node->InputAt(3))};

    InstructionOperand outputs[] = {
        g.DefineAsRegister(node),
        g.DefineAsRegister(NodeProperties::FindProjection(node, 1))};

    selector->Emit(opcode, 2, outputs, 4, inputs);
  } else {
    // The high word of the result is not used, so we emit the standard 32-bit
    // instruction.
    selector->Emit(opcode2, g.DefineSameAsFirst(node),
                   g.UseRegister(node->InputAt(0)),
                   g.UseRegister(node->InputAt(2)), g.TempImmediate(0));
  }
}

void InstructionSelector::VisitInt32PairAdd(Node* node) {
  VisitPairBinop(this, kS390_AddPair, kS390_Add32, node);
}

void InstructionSelector::VisitInt32PairSub(Node* node) {
  VisitPairBinop(this, kS390_SubPair, kS390_Sub32, node);
}

void InstructionSelector::VisitInt32PairMul(Node* node) {
  S390OperandGenerator g(this);
  Node* projection1 = NodeProperties::FindProjection(node, 1);
  if (projection1) {
    InstructionOperand inputs[] = {g.UseUniqueRegister(node->InputAt(0)),
                                   g.UseUniqueRegister(node->InputAt(1)),
                                   g.UseUniqueRegister(node->InputAt(2)),
                                   g.UseUniqueRegister(node->InputAt(3))};

    InstructionOperand outputs[] = {
        g.DefineAsRegister(node),
        g.DefineAsRegister(NodeProperties::FindProjection(node, 1))};

    Emit(kS390_MulPair, 2, outputs, 4, inputs);
  } else {
    // The high word of the result is not used, so we emit the standard 32-bit
    // instruction.
    Emit(kS390_Mul32, g.DefineSameAsFirst(node),
         g.UseRegister(node->InputAt(0)), g.Use(node->InputAt(2)),
         g.TempImmediate(0));
  }
}

void VisitPairShift(InstructionSelector* selector, InstructionCode opcode,
                    Node* node) {
  S390OperandGenerator g(selector);
  // We use UseUniqueRegister here to guarantee that there is no register
  // aliasing of input registers with output registers.
  Int32Matcher m(node->InputAt(2));
  InstructionOperand shift_operand;
  if (m.HasValue()) {
    shift_operand = g.UseImmediate(m.node());
  } else {
    shift_operand = g.UseUniqueRegister(m.node());
  }

  InstructionOperand inputs[] = {g.UseUniqueRegister(node->InputAt(0)),
                                 g.UseUniqueRegister(node->InputAt(1)),
                                 shift_operand};

  Node* projection1 = NodeProperties::FindProjection(node, 1);

  InstructionOperand outputs[2];
  InstructionOperand temps[1];
  int32_t output_count = 0;
  int32_t temp_count = 0;

  outputs[output_count++] = g.DefineAsRegister(node);
  if (projection1) {
    outputs[output_count++] = g.DefineAsRegister(projection1);
  } else {
    temps[temp_count++] = g.TempRegister();
  }

  selector->Emit(opcode, output_count, outputs, 3, inputs, temp_count, temps);
}

void InstructionSelector::VisitWord32PairShl(Node* node) {
  VisitPairShift(this, kS390_ShiftLeftPair, node);
}

void InstructionSelector::VisitWord32PairShr(Node* node) {
  VisitPairShift(this, kS390_ShiftRightPair, node);
}

void InstructionSelector::VisitWord32PairSar(Node* node) {
  VisitPairShift(this, kS390_ShiftRightArithPair, node);
}
#endif

void InstructionSelector::VisitWord32Ctz(Node* node) { UNREACHABLE(); }

#if V8_TARGET_ARCH_S390X
void InstructionSelector::VisitWord64Ctz(Node* node) { UNREACHABLE(); }
#endif

void InstructionSelector::VisitWord32ReverseBits(Node* node) { UNREACHABLE(); }

#if V8_TARGET_ARCH_S390X
void InstructionSelector::VisitWord64ReverseBits(Node* node) { UNREACHABLE(); }
#endif

void InstructionSelector::VisitSpeculationFence(Node* node) { UNREACHABLE(); }

void InstructionSelector::VisitInt32AbsWithOverflow(Node* node) {
  VisitWord32UnaryOp(this, node, kS390_Abs32, OperandMode::kNone);
}

void InstructionSelector::VisitInt64AbsWithOverflow(Node* node) {
  VisitWord64UnaryOp(this, node, kS390_Abs64, OperandMode::kNone);
}

void InstructionSelector::VisitWord64ReverseBytes(Node* node) {
  S390OperandGenerator g(this);
  Emit(kS390_LoadReverse64RR, g.DefineAsRegister(node),
       g.UseRegister(node->InputAt(0)));
}

void InstructionSelector::VisitWord32ReverseBytes(Node* node) {
  S390OperandGenerator g(this);
  NodeMatcher input(node->InputAt(0));
  if (CanCover(node, input.node()) && input.IsLoad()) {
    LoadRepresentation load_rep = LoadRepresentationOf(input.node()->op());
    if (load_rep.representation() == MachineRepresentation::kWord32) {
      Node* base = input.node()->InputAt(0);
      Node* offset = input.node()->InputAt(1);
      Emit(kS390_LoadReverse32 | AddressingModeField::encode(kMode_MRR),
           g.DefineAsRegister(node), g.UseRegister(base),
           g.UseRegister(offset));
      return;
    }
  }
  Emit(kS390_LoadReverse32RR, g.DefineAsRegister(node),
       g.UseRegister(node->InputAt(0)));
}

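// Strength-reduction helpers: a subtraction from zero becomes a negate, and a
// multiplication by a power of two becomes a shift. Each returns true when it
// emitted code, so the generic binop path can be skipped.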
template <class Matcher, ArchOpcode neg_opcode>
static inline bool TryMatchNegFromSub(InstructionSelector* selector,
                                      Node* node) {
  S390OperandGenerator g(selector);
  Matcher m(node);
  static_assert(neg_opcode == kS390_Neg32 || neg_opcode == kS390_Neg64,
                "Provided opcode is not a Neg opcode.");
  if (m.left().Is(0)) {
    Node* value = m.right().node();
    bool doZeroExt = DoZeroExtForResult(node);
    bool canEliminateZeroExt = ProduceWord32Result(value);
    if (doZeroExt) {
      selector->Emit(neg_opcode,
                     canEliminateZeroExt ? g.DefineSameAsFirst(node)
                                         : g.DefineAsRegister(node),
                     g.UseRegister(value),
                     g.TempImmediate(!canEliminateZeroExt));
    } else {
      selector->Emit(neg_opcode, g.DefineAsRegister(node),
                     g.UseRegister(value));
    }
    return true;
  }
  return false;
}

template <class Matcher, ArchOpcode shift_op>
bool TryMatchShiftFromMul(InstructionSelector* selector, Node* node) {
  S390OperandGenerator g(selector);
  Matcher m(node);
  Node* left = m.left().node();
  Node* right = m.right().node();
  if (g.CanBeImmediate(right, OperandMode::kInt32Imm) &&
      base::bits::IsPowerOfTwo(g.GetImmediate(right))) {
    int power = 63 - base::bits::CountLeadingZeros64(g.GetImmediate(right));
    bool doZeroExt = DoZeroExtForResult(node);
    bool canEliminateZeroExt = ProduceWord32Result(left);
    InstructionOperand dst = (doZeroExt && !canEliminateZeroExt &&
                              CpuFeatures::IsSupported(DISTINCT_OPS))
                                 ? g.DefineAsRegister(node)
                                 : g.DefineSameAsFirst(node);
    if (doZeroExt) {
      selector->Emit(shift_op, dst, g.UseRegister(left), g.UseImmediate(power),
                     g.TempImmediate(!canEliminateZeroExt));
    } else {
      selector->Emit(shift_op, dst, g.UseRegister(left), g.UseImmediate(power));
    }
    return true;
  }
  return false;
}

template <ArchOpcode opcode>
static inline bool TryMatchInt32OpWithOverflow(InstructionSelector* selector,
                                               Node* node, OperandModes mode) {
  if (Node* ovf = NodeProperties::FindProjection(node, 1)) {
    FlagsContinuation cont = FlagsContinuation::ForSet(kOverflow, ovf);
    VisitWord32BinOp(selector, node, opcode, mode, &cont);
    return true;
  }
  return false;
}

static inline bool TryMatchInt32AddWithOverflow(InstructionSelector* selector,
                                                Node* node) {
  return TryMatchInt32OpWithOverflow<kS390_Add32>(selector, node,
                                                  AddOperandMode);
}

static inline bool TryMatchInt32SubWithOverflow(InstructionSelector* selector,
                                                Node* node) {
  return TryMatchInt32OpWithOverflow<kS390_Sub32>(selector, node,
                                                  SubOperandMode);
}

static inline bool TryMatchInt32MulWithOverflow(InstructionSelector* selector,
                                                Node* node) {
  if (Node* ovf = NodeProperties::FindProjection(node, 1)) {
    if (CpuFeatures::IsSupported(MISC_INSTR_EXT2)) {
      TryMatchInt32OpWithOverflow<kS390_Mul32>(
          selector, node, OperandMode::kAllowRRR | OperandMode::kAllowRM);
    } else {
      FlagsContinuation cont = FlagsContinuation::ForSet(kNotEqual, ovf);
      VisitWord32BinOp(selector, node, kS390_Mul32WithOverflow,
                       OperandMode::kInt32Imm | OperandMode::kAllowDistinctOps,
                       &cont);
    }
    return true;
  }
  return TryMatchShiftFromMul<Int32BinopMatcher, kS390_ShiftLeft32>(selector,
                                                                    node);
}

#if V8_TARGET_ARCH_S390X
template <ArchOpcode opcode>
static inline bool TryMatchInt64OpWithOverflow(InstructionSelector* selector,
                                               Node* node, OperandModes mode) {
  if (Node* ovf = NodeProperties::FindProjection(node, 1)) {
    FlagsContinuation cont = FlagsContinuation::ForSet(kOverflow, ovf);
    VisitWord64BinOp(selector, node, opcode, mode, &cont);
    return true;
  }
  return false;
}

static inline bool TryMatchInt64AddWithOverflow(InstructionSelector* selector,
                                                Node* node) {
  return TryMatchInt64OpWithOverflow<kS390_Add64>(selector, node,
                                                  AddOperandMode);
}

static inline bool TryMatchInt64SubWithOverflow(InstructionSelector* selector,
                                                Node* node) {
  return TryMatchInt64OpWithOverflow<kS390_Sub64>(selector, node,
                                                  SubOperandMode);
}
#endif

static inline bool TryMatchDoubleConstructFromInsert(
    InstructionSelector* selector, Node* node) {
  S390OperandGenerator g(selector);
  Node* left = node->InputAt(0);
  Node* right = node->InputAt(1);
  Node* lo32 = nullptr;
  Node* hi32 = nullptr;

  if (node->opcode() == IrOpcode::kFloat64InsertLowWord32) {
    lo32 = right;
  } else if (node->opcode() == IrOpcode::kFloat64InsertHighWord32) {
    hi32 = right;
  } else {
    return false;  // doesn't match
  }

  if (left->opcode() == IrOpcode::kFloat64InsertLowWord32) {
    lo32 = left->InputAt(1);
  } else if (left->opcode() == IrOpcode::kFloat64InsertHighWord32) {
    hi32 = left->InputAt(1);
  } else {
    return false;  // doesn't match
  }

  if (!lo32 || !hi32) return false;

  selector->Emit(kS390_DoubleConstruct, g.DefineAsRegister(node),
                 g.UseRegister(hi32), g.UseRegister(lo32));
  return true;
}

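// The op lists below expand into one InstructionSelector::Visit* method per
// entry via DECLARE_UNARY_OP / DECLARE_BIN_OP. The last column is a callable
// that gets a first chance at matching a special pattern; "null" is a lambda
// that simply returns false, i.e. no special case.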
#define null ([]() { return false; })

#define FLOAT_UNARY_OP_LIST_32(V)                                              \
  V(Float32, ChangeFloat32ToFloat64, kS390_Float32ToDouble,                    \
    OperandMode::kAllowRM, null)                                               \
  V(Float32, BitcastFloat32ToInt32, kS390_BitcastFloat32ToInt32,               \
    OperandMode::kAllowRM, null)                                               \
  V(Float64, TruncateFloat64ToFloat32, kS390_DoubleToFloat32,                  \
    OperandMode::kNone, null)                                                  \
  V(Float64, TruncateFloat64ToWord32, kArchTruncateDoubleToI,                  \
    OperandMode::kNone, null)                                                  \
  V(Float64, RoundFloat64ToInt32, kS390_DoubleToInt32, OperandMode::kNone,     \
    null)                                                                      \
  V(Float32, TruncateFloat32ToInt32, kS390_Float32ToInt32, OperandMode::kNone, \
    null)                                                                      \
  V(Float32, TruncateFloat32ToUint32, kS390_Float32ToUint32,                   \
    OperandMode::kNone, null)                                                  \
  V(Float64, TruncateFloat64ToUint32, kS390_DoubleToUint32,                    \
    OperandMode::kNone, null)                                                  \
  V(Float64, ChangeFloat64ToInt32, kS390_DoubleToInt32, OperandMode::kNone,    \
    null)                                                                      \
  V(Float64, ChangeFloat64ToUint32, kS390_DoubleToUint32, OperandMode::kNone,  \
    null)                                                                      \
  V(Float64, Float64SilenceNaN, kS390_Float64SilenceNaN, OperandMode::kNone,   \
    null)                                                                      \
  V(Float32, Float32Abs, kS390_AbsFloat, OperandMode::kNone, null)             \
  V(Float64, Float64Abs, kS390_AbsDouble, OperandMode::kNone, null)            \
  V(Float32, Float32Sqrt, kS390_SqrtFloat, OperandMode::kNone, null)           \
  V(Float64, Float64Sqrt, kS390_SqrtDouble, OperandMode::kNone, null)          \
  V(Float32, Float32RoundDown, kS390_FloorFloat, OperandMode::kNone, null)     \
  V(Float64, Float64RoundDown, kS390_FloorDouble, OperandMode::kNone, null)    \
  V(Float32, Float32RoundUp, kS390_CeilFloat, OperandMode::kNone, null)        \
  V(Float64, Float64RoundUp, kS390_CeilDouble, OperandMode::kNone, null)       \
  V(Float32, Float32RoundTruncate, kS390_TruncateFloat, OperandMode::kNone,    \
    null)                                                                      \
  V(Float64, Float64RoundTruncate, kS390_TruncateDouble, OperandMode::kNone,   \
    null)                                                                      \
  V(Float64, Float64RoundTiesAway, kS390_RoundDouble, OperandMode::kNone,      \
    null)                                                                      \
  V(Float32, Float32Neg, kS390_NegFloat, OperandMode::kNone, null)             \
  V(Float64, Float64Neg, kS390_NegDouble, OperandMode::kNone, null)            \
  V(Word32, Float64ExtractLowWord32, kS390_DoubleExtractLowWord32,             \
    OperandMode::kNone, null)                                                  \
  V(Word32, Float64ExtractHighWord32, kS390_DoubleExtractHighWord32,           \
    OperandMode::kNone, null)

#define FLOAT_BIN_OP_LIST(V)                                           \
  V(Float32, Float32Add, kS390_AddFloat, OperandMode::kAllowRM, null)  \
  V(Float64, Float64Add, kS390_AddDouble, OperandMode::kAllowRM, null) \
  V(Float32, Float32Sub, kS390_SubFloat, OperandMode::kAllowRM, null)  \
  V(Float64, Float64Sub, kS390_SubDouble, OperandMode::kAllowRM, null) \
  V(Float32, Float32Mul, kS390_MulFloat, OperandMode::kAllowRM, null)  \
  V(Float64, Float64Mul, kS390_MulDouble, OperandMode::kAllowRM, null) \
  V(Float32, Float32Div, kS390_DivFloat, OperandMode::kAllowRM, null)  \
  V(Float64, Float64Div, kS390_DivDouble, OperandMode::kAllowRM, null) \
  V(Float32, Float32Max, kS390_MaxFloat, OperandMode::kNone, null)     \
  V(Float64, Float64Max, kS390_MaxDouble, OperandMode::kNone, null)    \
  V(Float32, Float32Min, kS390_MinFloat, OperandMode::kNone, null)     \
  V(Float64, Float64Min, kS390_MinDouble, OperandMode::kNone, null)

#define WORD32_UNARY_OP_LIST_32(V)                                           \
  V(Word32, Word32Clz, kS390_Cntlz32, OperandMode::kNone, null)              \
  V(Word32, Word32Popcnt, kS390_Popcnt32, OperandMode::kNone, null)          \
  V(Word32, RoundInt32ToFloat32, kS390_Int32ToFloat32, OperandMode::kNone,   \
    null)                                                                    \
  V(Word32, RoundUint32ToFloat32, kS390_Uint32ToFloat32, OperandMode::kNone, \
    null)                                                                    \
  V(Word32, ChangeInt32ToFloat64, kS390_Int32ToDouble, OperandMode::kNone,   \
    null)                                                                    \
  V(Word32, ChangeUint32ToFloat64, kS390_Uint32ToDouble, OperandMode::kNone, \
    null)                                                                    \
  V(Word32, SignExtendWord8ToInt32, kS390_SignExtendWord8ToInt32,            \
    OperandMode::kNone, null)                                                \
  V(Word32, SignExtendWord16ToInt32, kS390_SignExtendWord16ToInt32,          \
    OperandMode::kNone, null)                                                \
  V(Word32, BitcastInt32ToFloat32, kS390_BitcastInt32ToFloat32,              \
    OperandMode::kNone, null)

#ifdef V8_TARGET_ARCH_S390X
#define FLOAT_UNARY_OP_LIST(V)                                                \
  FLOAT_UNARY_OP_LIST_32(V)                                                   \
  V(Float64, ChangeFloat64ToUint64, kS390_DoubleToUint64, OperandMode::kNone, \
    null)                                                                     \
  V(Float64, ChangeFloat64ToInt64, kS390_DoubleToInt64, OperandMode::kNone,   \
    null)                                                                     \
  V(Float64, TruncateFloat64ToInt64, kS390_DoubleToInt64, OperandMode::kNone, \
    null)                                                                     \
  V(Float64, BitcastFloat64ToInt64, kS390_BitcastDoubleToInt64,               \
    OperandMode::kNone, null)

#define WORD32_UNARY_OP_LIST(V)                                             \
  WORD32_UNARY_OP_LIST_32(V)                                                \
  V(Word32, ChangeInt32ToInt64, kS390_SignExtendWord32ToInt64,              \
    OperandMode::kNone, null)                                               \
  V(Word32, SignExtendWord8ToInt64, kS390_SignExtendWord8ToInt64,           \
    OperandMode::kNone, null)                                               \
  V(Word32, SignExtendWord16ToInt64, kS390_SignExtendWord16ToInt64,         \
    OperandMode::kNone, null)                                               \
  V(Word32, SignExtendWord32ToInt64, kS390_SignExtendWord32ToInt64,         \
    OperandMode::kNone, null)                                               \
  V(Word32, ChangeUint32ToUint64, kS390_Uint32ToUint64, OperandMode::kNone, \
    ([&]() -> bool {                                                        \
      if (ProduceWord32Result(node->InputAt(0))) {                          \
        EmitIdentity(node);                                                 \
        return true;                                                        \
      }                                                                     \
      return false;                                                         \
    }))

#else
#define FLOAT_UNARY_OP_LIST(V) FLOAT_UNARY_OP_LIST_32(V)
#define WORD32_UNARY_OP_LIST(V) WORD32_UNARY_OP_LIST_32(V)
#endif

#define WORD32_BIN_OP_LIST(V)                                                  \
  V(Word32, Int32Add, kS390_Add32, AddOperandMode, null)                      \
  V(Word32, Int32Sub, kS390_Sub32, SubOperandMode, ([&]() {                   \
      return TryMatchNegFromSub<Int32BinopMatcher, kS390_Neg32>(this, node);  \
    }))                                                                       \
  V(Word32, Int32Mul, kS390_Mul32, MulOperandMode, ([&]() {                   \
      return TryMatchShiftFromMul<Int32BinopMatcher, kS390_ShiftLeft32>(this, \
                                                                        node); \
    }))                                                                       \
  V(Word32, Int32AddWithOverflow, kS390_Add32, AddOperandMode,                \
    ([&]() { return TryMatchInt32AddWithOverflow(this, node); }))             \
  V(Word32, Int32SubWithOverflow, kS390_Sub32, SubOperandMode,                \
    ([&]() { return TryMatchInt32SubWithOverflow(this, node); }))             \
  V(Word32, Int32MulWithOverflow, kS390_Mul32, MulOperandMode,                \
    ([&]() { return TryMatchInt32MulWithOverflow(this, node); }))             \
  V(Word32, Int32MulHigh, kS390_MulHigh32,                                    \
    OperandMode::kInt32Imm | OperandMode::kAllowDistinctOps, null)            \
  V(Word32, Uint32MulHigh, kS390_MulHighU32,                                  \
    OperandMode::kAllowRRM | OperandMode::kAllowRRR, null)                    \
  V(Word32, Int32Div, kS390_Div32,                                            \
    OperandMode::kAllowRRM | OperandMode::kAllowRRR, null)                    \
  V(Word32, Uint32Div, kS390_DivU32,                                          \
    OperandMode::kAllowRRM | OperandMode::kAllowRRR, null)                    \
  V(Word32, Int32Mod, kS390_Mod32,                                            \
    OperandMode::kAllowRRM | OperandMode::kAllowRRR, null)                    \
  V(Word32, Uint32Mod, kS390_ModU32,                                          \
    OperandMode::kAllowRRM | OperandMode::kAllowRRR, null)                    \
  V(Word32, Word32Ror, kS390_RotRight32,                                      \
    OperandMode::kAllowRI | OperandMode::kAllowRRR | OperandMode::kAllowRRI | \
        OperandMode::kShift32Imm,                                             \
    null)                                                                     \
  V(Word32, Word32And, kS390_And32, And32OperandMode, null)                   \
  V(Word32, Word32Or, kS390_Or32, Or32OperandMode, null)                      \
  V(Word32, Word32Xor, kS390_Xor32, Xor32OperandMode, null)                   \
  V(Word32, Word32Shl, kS390_ShiftLeft32, Shift32OperandMode, null)           \
  V(Word32, Word32Shr, kS390_ShiftRight32, Shift32OperandMode, null)          \
  V(Word32, Word32Sar, kS390_ShiftRightArith32, Shift32OperandMode,           \
    [&]() { return TryMatchSignExtInt16OrInt8FromWord32Sar(this, node); })    \
  V(Word32, Float64InsertLowWord32, kS390_DoubleInsertLowWord32,              \
    OperandMode::kAllowRRR,                                                   \
    [&]() -> bool { return TryMatchDoubleConstructFromInsert(this, node); })  \
  V(Word32, Float64InsertHighWord32, kS390_DoubleInsertHighWord32,            \
    OperandMode::kAllowRRR,                                                   \
    [&]() -> bool { return TryMatchDoubleConstructFromInsert(this, node); })

#define WORD64_UNARY_OP_LIST(V)                                              \
  V(Word64, Word64Popcnt, kS390_Popcnt64, OperandMode::kNone, null)          \
  V(Word64, Word64Clz, kS390_Cntlz64, OperandMode::kNone, null)              \
  V(Word64, TruncateInt64ToInt32, kS390_Int64ToInt32, OperandMode::kNone,    \
    null)                                                                    \
  V(Word64, RoundInt64ToFloat32, kS390_Int64ToFloat32, OperandMode::kNone,   \
    null)                                                                    \
  V(Word64, RoundInt64ToFloat64, kS390_Int64ToDouble, OperandMode::kNone,    \
    null)                                                                    \
  V(Word64, ChangeInt64ToFloat64, kS390_Int64ToDouble, OperandMode::kNone,   \
    null)                                                                    \
  V(Word64, RoundUint64ToFloat32, kS390_Uint64ToFloat32, OperandMode::kNone, \
    null)                                                                    \
  V(Word64, RoundUint64ToFloat64, kS390_Uint64ToDouble, OperandMode::kNone,  \
    null)                                                                    \
  V(Word64, BitcastInt64ToFloat64, kS390_BitcastInt64ToDouble,               \
    OperandMode::kNone, null)

#define WORD64_BIN_OP_LIST(V)                                                  \
  V(Word64, Int64Add, kS390_Add64, AddOperandMode, null)                      \
  V(Word64, Int64Sub, kS390_Sub64, SubOperandMode, ([&]() {                   \
      return TryMatchNegFromSub<Int64BinopMatcher, kS390_Neg64>(this, node);  \
    }))                                                                       \
  V(Word64, Int64AddWithOverflow, kS390_Add64, AddOperandMode,                \
    ([&]() { return TryMatchInt64AddWithOverflow(this, node); }))             \
  V(Word64, Int64SubWithOverflow, kS390_Sub64, SubOperandMode,                \
    ([&]() { return TryMatchInt64SubWithOverflow(this, node); }))             \
  V(Word64, Int64Mul, kS390_Mul64, MulOperandMode, ([&]() {                   \
      return TryMatchShiftFromMul<Int64BinopMatcher, kS390_ShiftLeft64>(this, \
                                                                        node); \
    }))                                                                       \
  V(Word64, Int64Div, kS390_Div64,                                            \
    OperandMode::kAllowRRM | OperandMode::kAllowRRR, null)                    \
  V(Word64, Uint64Div, kS390_DivU64,                                          \
    OperandMode::kAllowRRM | OperandMode::kAllowRRR, null)                    \
  V(Word64, Int64Mod, kS390_Mod64,                                            \
    OperandMode::kAllowRRM | OperandMode::kAllowRRR, null)                    \
  V(Word64, Uint64Mod, kS390_ModU64,                                          \
    OperandMode::kAllowRRM | OperandMode::kAllowRRR, null)                    \
  V(Word64, Word64Sar, kS390_ShiftRightArith64, Shift64OperandMode, null)     \
  V(Word64, Word64Ror, kS390_RotRight64, Shift64OperandMode, null)            \
  V(Word64, Word64Or, kS390_Or64, Or64OperandMode, null)                      \
  V(Word64, Word64Xor, kS390_Xor64, Xor64OperandMode, null)

#define DECLARE_UNARY_OP(type, name, op, mode, try_extra) \
  void InstructionSelector::Visit##name(Node* node) {     \
    if (std::function<bool()>(try_extra)()) return;       \
    Visit##type##UnaryOp(this, node, op, mode);           \
  }

#define DECLARE_BIN_OP(type, name, op, mode, try_extra) \
  void InstructionSelector::Visit##name(Node* node) {   \
    if (std::function<bool()>(try_extra)()) return;     \
    Visit##type##BinOp(this, node, op, mode);           \
  }

WORD32_BIN_OP_LIST(DECLARE_BIN_OP);
WORD32_UNARY_OP_LIST(DECLARE_UNARY_OP);
FLOAT_UNARY_OP_LIST(DECLARE_UNARY_OP);
FLOAT_BIN_OP_LIST(DECLARE_BIN_OP);

#if V8_TARGET_ARCH_S390X
WORD64_UNARY_OP_LIST(DECLARE_UNARY_OP)
WORD64_BIN_OP_LIST(DECLARE_BIN_OP)
#endif

#undef DECLARE_BIN_OP
#undef DECLARE_UNARY_OP
#undef WORD64_BIN_OP_LIST
#undef WORD64_UNARY_OP_LIST
#undef WORD32_BIN_OP_LIST
#undef WORD32_UNARY_OP_LIST
#undef FLOAT_UNARY_OP_LIST
#undef WORD32_UNARY_OP_LIST_32
#undef FLOAT_BIN_OP_LIST
#undef FLOAT_BIN_OP_LIST_32

#if V8_TARGET_ARCH_S390X
void InstructionSelector::VisitTryTruncateFloat32ToInt64(Node* node) {
  VisitTryTruncateDouble(this, kS390_Float32ToInt64, node);
}

void InstructionSelector::VisitTryTruncateFloat64ToInt64(Node* node) {
  VisitTryTruncateDouble(this, kS390_DoubleToInt64, node);
}

void InstructionSelector::VisitTryTruncateFloat32ToUint64(Node* node) {
  VisitTryTruncateDouble(this, kS390_Float32ToUint64, node);
}

void InstructionSelector::VisitTryTruncateFloat64ToUint64(Node* node) {
  VisitTryTruncateDouble(this, kS390_DoubleToUint64, node);
}
#endif

void InstructionSelector::VisitFloat64Mod(Node* node) {
  S390OperandGenerator g(this);
  Emit(kS390_ModDouble, g.DefineAsFixed(node, d1),
       g.UseFixed(node->InputAt(0), d1), g.UseFixed(node->InputAt(1), d2))
      ->MarkAsCall();
}

void InstructionSelector::VisitFloat64Ieee754Unop(Node* node,
                                                  InstructionCode opcode) {
  S390OperandGenerator g(this);
  Emit(opcode, g.DefineAsFixed(node, d1), g.UseFixed(node->InputAt(0), d1))
      ->MarkAsCall();
}

void InstructionSelector::VisitFloat64Ieee754Binop(Node* node,
                                                   InstructionCode opcode) {
  S390OperandGenerator g(this);
  Emit(opcode, g.DefineAsFixed(node, d1), g.UseFixed(node->InputAt(0), d1),
       g.UseFixed(node->InputAt(1), d2))
      ->MarkAsCall();
}

void InstructionSelector::VisitFloat32RoundTiesEven(Node* node) {
  UNREACHABLE();
}

void InstructionSelector::VisitFloat64RoundTiesEven(Node* node) {
  UNREACHABLE();
}

static bool CompareLogical(FlagsContinuation* cont) {
  switch (cont->condition()) {
    case kUnsignedLessThan:
    case kUnsignedGreaterThanOrEqual:
    case kUnsignedLessThanOrEqual:
    case kUnsignedGreaterThan:
      return true;
    default:
      return false;
  }
}

// Shared routine for multiple compare operations.
void VisitCompare(InstructionSelector* selector, InstructionCode opcode,
                  InstructionOperand left, InstructionOperand right,
                  FlagsContinuation* cont) {
  selector->EmitWithContinuation(opcode, left, right, cont);
}

void VisitLoadAndTest(InstructionSelector* selector, InstructionCode opcode,
                      Node* node, Node* value, FlagsContinuation* cont,
                      bool discard_output = false);

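// Shared compare lowering: operands are swapped so that immediates and
// foldable memory operands end up on the right (commuting the condition where
// needed), and a comparison against zero is turned into a load-and-test.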
void VisitWordCompare(InstructionSelector* selector, Node* node,
                      InstructionCode opcode, FlagsContinuation* cont,
                      OperandModes immediate_mode) {
  S390OperandGenerator g(selector);
  Node* left = node->InputAt(0);
  Node* right = node->InputAt(1);

  DCHECK(IrOpcode::IsComparisonOpcode(node->opcode()) ||
         node->opcode() == IrOpcode::kInt32Sub ||
         node->opcode() == IrOpcode::kInt64Sub);

  InstructionOperand inputs[8];
  InstructionOperand outputs[1];
  size_t input_count = 0;
  size_t output_count = 0;

  // If one of the two inputs is an immediate, make sure it's on the right, or
  // if one of the two inputs is a memory operand, make sure it's on the right.
  int effect_level = selector->GetEffectLevel(node);
  if (cont->IsBranch()) {
    effect_level = selector->GetEffectLevel(
        cont->true_block()->PredecessorAt(0)->control_input());
  }

  if ((!g.CanBeImmediate(right, immediate_mode) &&
       g.CanBeImmediate(left, immediate_mode)) ||
      (!g.CanBeMemoryOperand(opcode, node, right, effect_level) &&
       g.CanBeMemoryOperand(opcode, node, left, effect_level))) {
    if (!node->op()->HasProperty(Operator::kCommutative)) cont->Commute();
    std::swap(left, right);
  }

  // Check whether this is a compare against zero.
  if (g.CanBeImmediate(right, immediate_mode) && g.GetImmediate(right) == 0) {
    DCHECK(opcode == kS390_Cmp32 || opcode == kS390_Cmp64);
    ArchOpcode load_and_test = (opcode == kS390_Cmp32)
                                   ? kS390_LoadAndTestWord32
                                   : kS390_LoadAndTestWord64;
    return VisitLoadAndTest(selector, load_and_test, node, left, cont, true);
  }

  inputs[input_count++] = g.UseRegisterOrStackPointer(left);
  if (g.CanBeMemoryOperand(opcode, node, right, effect_level)) {
    // Generate memory operand.
    AddressingMode addressing_mode = g.GetEffectiveAddressMemoryOperand(
        right, inputs, &input_count, OpcodeImmMode(opcode));
    opcode |= AddressingModeField::encode(addressing_mode);
  } else if (g.CanBeImmediate(right, immediate_mode)) {
    inputs[input_count++] = g.UseImmediate(right);
  } else {
    inputs[input_count++] = g.UseAnyExceptImmediate(right);
  }

  DCHECK(input_count <= 8 && output_count <= 1);
  selector->EmitWithContinuation(opcode, output_count, outputs, input_count,
                                 inputs, cont);
}

void VisitWord32Compare(InstructionSelector* selector, Node* node,
                        FlagsContinuation* cont) {
  OperandModes mode =
      (CompareLogical(cont) ? OperandMode::kUint32Imm : OperandMode::kInt32Imm);
  VisitWordCompare(selector, node, kS390_Cmp32, cont, mode);
}

#if V8_TARGET_ARCH_S390X
void VisitWord64Compare(InstructionSelector* selector, Node* node,
                        FlagsContinuation* cont) {
  OperandModes mode =
      (CompareLogical(cont) ? OperandMode::kUint32Imm : OperandMode::kInt32Imm);
  VisitWordCompare(selector, node, kS390_Cmp64, cont, mode);
}
#endif

void VisitFloat32Compare(InstructionSelector* selector, Node* node,
                         FlagsContinuation* cont) {
  VisitWordCompare(selector, node, kS390_CmpFloat, cont, OperandMode::kNone);
}

void VisitFloat64Compare(InstructionSelector* selector, Node* node,
                         FlagsContinuation* cont) {
  VisitWordCompare(selector, node, kS390_CmpDouble, cont, OperandMode::kNone);
}

void VisitTestUnderMask(InstructionSelector* selector, Node* node,
                        FlagsContinuation* cont) {
  DCHECK(node->opcode() == IrOpcode::kWord32And ||
         node->opcode() == IrOpcode::kWord64And);
  ArchOpcode opcode =
      (node->opcode() == IrOpcode::kWord32And) ? kS390_Tst32 : kS390_Tst64;
  S390OperandGenerator g(selector);
  Node* left = node->InputAt(0);
  Node* right = node->InputAt(1);
  if (!g.CanBeImmediate(right, OperandMode::kUint32Imm) &&
      g.CanBeImmediate(left, OperandMode::kUint32Imm)) {
    std::swap(left, right);
  }
  VisitCompare(selector, opcode, g.UseRegister(left),
               g.UseOperand(right, OperandMode::kUint32Imm), cont);
}

void VisitLoadAndTest(InstructionSelector* selector, InstructionCode opcode,
                      Node* node, Node* value, FlagsContinuation* cont,
                      bool discard_output) {
  static_assert(kS390_LoadAndTestFloat64 - kS390_LoadAndTestWord32 == 3,
                "LoadAndTest Opcode shouldn't contain other opcodes.");
  DCHECK(opcode >= kS390_LoadAndTestWord32 ||
         opcode <= kS390_LoadAndTestWord64);

  S390OperandGenerator g(selector);
  InstructionOperand inputs[8];
  InstructionOperand outputs[2];
  size_t input_count = 0;
  size_t output_count = 0;
  bool use_value = false;

  int effect_level = selector->GetEffectLevel(node);
  if (cont->IsBranch()) {
    effect_level = selector->GetEffectLevel(
        cont->true_block()->PredecessorAt(0)->control_input());
  }

  if (g.CanBeMemoryOperand(opcode, node, value, effect_level)) {
    // Generate memory operand.
    AddressingMode addressing_mode =
        g.GetEffectiveAddressMemoryOperand(value, inputs, &input_count);
    opcode |= AddressingModeField::encode(addressing_mode);
  } else {
    inputs[input_count++] = g.UseAnyExceptImmediate(value);
    use_value = true;
  }

  if (!discard_output && !use_value) {
    outputs[output_count++] = g.DefineAsRegister(value);
  }

  DCHECK(input_count <= 8 && output_count <= 2);
  selector->EmitWithContinuation(opcode, output_count, outputs, input_count,
                                 inputs, cont);
}

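// Tries to combine a comparison against zero with the node that produced the
// value: compares, tests-under-mask, loads and the overflow projections of
// checked arithmetic can all set the condition code directly. If nothing
// matches, a plain load-and-test is emitted.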
1793 void InstructionSelector::VisitWordCompareZero(Node* user, Node* value,
1794 FlagsContinuation* cont) {
1796 while (value->opcode() == IrOpcode::kWord32Equal && CanCover(user, value)) {
1797 Int32BinopMatcher m(value);
1798 if (!m.right().Is(0))
break;
1801 value = m.left().node();
1805 FlagsCondition fc = cont->condition();
1806 if (CanCover(user, value)) {
1807 switch (value->opcode()) {
1808 case IrOpcode::kWord32Equal: {
1809 cont->OverwriteAndNegateIfEqual(kEqual);
1810 Int32BinopMatcher m(value);
1811 if (m.right().Is(0)) {
1813 Node*
const user = m.node();
1814 Node*
const value = m.left().node();
1815 if (CanCover(user, value)) {
1816 switch (value->opcode()) {
1817 case IrOpcode::kInt32Sub:
1818 return VisitWord32Compare(
this, value, cont);
1819 case IrOpcode::kWord32And:
1820 return VisitTestUnderMask(
this, value, cont);
1826 return VisitWord32Compare(
this, value, cont);
1828 case IrOpcode::kInt32LessThan:
1829 cont->OverwriteAndNegateIfEqual(kSignedLessThan);
1830 return VisitWord32Compare(
this, value, cont);
1831 case IrOpcode::kInt32LessThanOrEqual:
1832 cont->OverwriteAndNegateIfEqual(kSignedLessThanOrEqual);
1833 return VisitWord32Compare(
this, value, cont);
1834 case IrOpcode::kUint32LessThan:
1835 cont->OverwriteAndNegateIfEqual(kUnsignedLessThan);
1836 return VisitWord32Compare(
this, value, cont);
1837 case IrOpcode::kUint32LessThanOrEqual:
1838 cont->OverwriteAndNegateIfEqual(kUnsignedLessThanOrEqual);
1839 return VisitWord32Compare(
this, value, cont);
1840 #if V8_TARGET_ARCH_S390X 1841 case IrOpcode::kWord64Equal: {
1842 cont->OverwriteAndNegateIfEqual(kEqual);
1843 Int64BinopMatcher m(value);
1844 if (m.right().Is(0)) {
1846 Node*
const user = m.node();
1847 Node*
const value = m.left().node();
1848 if (CanCover(user, value)) {
1849 switch (value->opcode()) {
1850 case IrOpcode::kInt64Sub:
1851 return VisitWord64Compare(
this, value, cont);
1852 case IrOpcode::kWord64And:
1853 return VisitTestUnderMask(
this, value, cont);
1859 return VisitWord64Compare(
this, value, cont);
1861 case IrOpcode::kInt64LessThan:
1862 cont->OverwriteAndNegateIfEqual(kSignedLessThan);
1863 return VisitWord64Compare(
this, value, cont);
1864 case IrOpcode::kInt64LessThanOrEqual:
1865 cont->OverwriteAndNegateIfEqual(kSignedLessThanOrEqual);
1866 return VisitWord64Compare(
this, value, cont);
1867 case IrOpcode::kUint64LessThan:
1868 cont->OverwriteAndNegateIfEqual(kUnsignedLessThan);
1869 return VisitWord64Compare(
this, value, cont);
1870 case IrOpcode::kUint64LessThanOrEqual:
1871 cont->OverwriteAndNegateIfEqual(kUnsignedLessThanOrEqual);
1872 return VisitWord64Compare(
this, value, cont);
1874 case IrOpcode::kFloat32Equal:
1875 cont->OverwriteAndNegateIfEqual(kEqual);
1876 return VisitFloat32Compare(
this, value, cont);
1877 case IrOpcode::kFloat32LessThan:
1878 cont->OverwriteAndNegateIfEqual(kUnsignedLessThan);
1879 return VisitFloat32Compare(
this, value, cont);
1880 case IrOpcode::kFloat32LessThanOrEqual:
1881 cont->OverwriteAndNegateIfEqual(kUnsignedLessThanOrEqual);
1882 return VisitFloat32Compare(
this, value, cont);
1883 case IrOpcode::kFloat64Equal:
1884 cont->OverwriteAndNegateIfEqual(kEqual);
1885 return VisitFloat64Compare(
this, value, cont);
1886 case IrOpcode::kFloat64LessThan:
1887 cont->OverwriteAndNegateIfEqual(kUnsignedLessThan);
1888 return VisitFloat64Compare(
this, value, cont);
1889 case IrOpcode::kFloat64LessThanOrEqual:
1890 cont->OverwriteAndNegateIfEqual(kUnsignedLessThanOrEqual);
1891 return VisitFloat64Compare(
this, value, cont);
1892 case IrOpcode::kProjection:
1895 if (ProjectionIndexOf(value->op()) == 1u) {
1901 Node*
const node = value->InputAt(0);
1902 Node*
const result = NodeProperties::FindProjection(node, 0);
1903 if (result ==
nullptr || IsDefined(result)) {
            switch (node->opcode()) {
              case IrOpcode::kInt32AddWithOverflow:
                cont->OverwriteAndNegateIfEqual(kOverflow);
                return VisitWord32BinOp(this, node, kS390_Add32,
                                        AddOperandMode, cont);
              case IrOpcode::kInt32SubWithOverflow:
                cont->OverwriteAndNegateIfEqual(kOverflow);
                return VisitWord32BinOp(this, node, kS390_Sub32,
                                        SubOperandMode, cont);
              case IrOpcode::kInt32MulWithOverflow:
                if (CpuFeatures::IsSupported(MISC_INSTR_EXT2)) {
                  cont->OverwriteAndNegateIfEqual(kOverflow);
                  return VisitWord32BinOp(
                      this, node, kS390_Mul32,
                      OperandMode::kAllowRRR | OperandMode::kAllowRM, cont);
                } else {
                  cont->OverwriteAndNegateIfEqual(kNotEqual);
                  return VisitWord32BinOp(
                      this, node, kS390_Mul32WithOverflow,
                      OperandMode::kInt32Imm | OperandMode::kAllowDistinctOps,
                      cont);
                }
              case IrOpcode::kInt32AbsWithOverflow:
                cont->OverwriteAndNegateIfEqual(kOverflow);
                return VisitWord32UnaryOp(this, node, kS390_Abs32,
                                          OperandMode::kNone, cont);
#if V8_TARGET_ARCH_S390X
              case IrOpcode::kInt64AbsWithOverflow:
                cont->OverwriteAndNegateIfEqual(kOverflow);
                return VisitWord64UnaryOp(this, node, kS390_Abs64,
                                          OperandMode::kNone, cont);
              case IrOpcode::kInt64AddWithOverflow:
                cont->OverwriteAndNegateIfEqual(kOverflow);
                return VisitWord64BinOp(this, node, kS390_Add64,
                                        AddOperandMode, cont);
              case IrOpcode::kInt64SubWithOverflow:
                cont->OverwriteAndNegateIfEqual(kOverflow);
                return VisitWord64BinOp(this, node, kS390_Sub64,
                                        SubOperandMode, cont);
#endif
              default:
                break;
            }
          }
        }
        break;
      case IrOpcode::kInt32Sub:
        if (fc == kNotEqual || fc == kEqual)
          return VisitWord32Compare(this, value, cont);
        break;
      case IrOpcode::kWord32And:
        return VisitTestUnderMask(this, value, cont);
      case IrOpcode::kLoad: {
        LoadRepresentation load_rep = LoadRepresentationOf(value->op());
        switch (load_rep.representation()) {
          case MachineRepresentation::kWord32:
            return VisitLoadAndTest(this, kS390_LoadAndTestWord32, user, value,
                                    cont, true);
          default:
            break;
        }
        break;
      }
      case IrOpcode::kInt32Add:
        // can't handle the overflow case here.
        break;
      case IrOpcode::kWord32Or:
        if (fc == kNotEqual || fc == kEqual)
          return VisitWord32BinOp(this, value, kS390_Or32, Or32OperandMode,
                                  cont);
        break;
      case IrOpcode::kWord32Xor:
        if (fc == kNotEqual || fc == kEqual)
          return VisitWord32BinOp(this, value, kS390_Xor32, Xor32OperandMode,
                                  cont);
        break;
      case IrOpcode::kWord32Sar:
      case IrOpcode::kWord32Shl:
      case IrOpcode::kWord32Shr:
      case IrOpcode::kWord32Ror:
        // these don't set the condition code, so ignore.
        break;
#if V8_TARGET_ARCH_S390X
      case IrOpcode::kInt64Sub:
        if (fc == kNotEqual || fc == kEqual)
          return VisitWord64Compare(this, value, cont);
        break;
      case IrOpcode::kWord64And:
        return VisitTestUnderMask(this, value, cont);
      case IrOpcode::kInt64Add:
        // can't handle the overflow case here.
        break;
      case IrOpcode::kWord64Or:
        if (fc == kNotEqual || fc == kEqual)
          return VisitWord64BinOp(this, value, kS390_Or64, Or64OperandMode,
                                  cont);
        break;
      case IrOpcode::kWord64Xor:
        if (fc == kNotEqual || fc == kEqual)
          return VisitWord64BinOp(this, value, kS390_Xor64, Xor64OperandMode,
                                  cont);
        break;
      case IrOpcode::kWord64Sar:
      case IrOpcode::kWord64Shl:
      case IrOpcode::kWord64Shr:
      case IrOpcode::kWord64Ror:
        // these don't set the condition code, so ignore.
        break;
#endif
      default:
        break;
    }
  }
  // The branch could not be combined with a compare; emit a load-and-test.
  VisitLoadAndTest(this, kS390_LoadAndTestWord32, user, value, cont, true);
}
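
// Lowers a Switch either to a jump table (EmitTableSwitch) or to a binary
// search over the case values (EmitBinarySearchSwitch).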
void InstructionSelector::VisitSwitch(Node* node, const SwitchInfo& sw) {
  S390OperandGenerator g(this);
  InstructionOperand value_operand = g.UseRegister(node->InputAt(0));
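
  // Use a jump table only when the heuristic below estimates it to be no more
  // expensive than a binary search and the value range is small enough.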
  if (enable_switch_jump_table_ == kEnableSwitchJumpTable) {
    static const size_t kMaxTableSwitchValueRange = 2 << 16;
    size_t table_space_cost = 4 + sw.value_range();
    size_t table_time_cost = 3;
    size_t lookup_space_cost = 3 + 2 * sw.case_count();
    size_t lookup_time_cost = sw.case_count();
    if (sw.case_count() > 0 &&
        table_space_cost + 3 * table_time_cost <=
            lookup_space_cost + 3 * lookup_time_cost &&
        sw.min_value() > std::numeric_limits<int32_t>::min() &&
        sw.value_range() <= kMaxTableSwitchValueRange) {
      InstructionOperand index_operand = value_operand;
      if (sw.min_value()) {
        index_operand = g.TempRegister();
        Emit(kS390_Lay | AddressingModeField::encode(kMode_MRI), index_operand,
             value_operand, g.TempImmediate(-sw.min_value()));
      }
#if V8_TARGET_ARCH_S390X
      InstructionOperand index_operand_zero_ext = g.TempRegister();
      Emit(kS390_Uint32ToUint64, index_operand_zero_ext, index_operand);
      index_operand = index_operand_zero_ext;
#endif
      return EmitTableSwitch(sw, index_operand);
    }
  }
  return EmitBinarySearchSwitch(sw, value_operand);
}
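
// Equality against zero is lowered to a load-and-test; other 32-bit equality
// checks go through the generic word compare path.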
void InstructionSelector::VisitWord32Equal(Node* const node) {
  FlagsContinuation cont = FlagsContinuation::ForSet(kEqual, node);
  Int32BinopMatcher m(node);
  if (m.right().Is(0)) {
    return VisitLoadAndTest(this, kS390_LoadAndTestWord32, m.node(),
                            m.left().node(), &cont, true);
  }
  VisitWord32Compare(this, node, &cont);
}

void InstructionSelector::VisitInt32LessThan(Node* node) {
  FlagsContinuation cont = FlagsContinuation::ForSet(kSignedLessThan, node);
  VisitWord32Compare(this, node, &cont);
}

void InstructionSelector::VisitInt32LessThanOrEqual(Node* node) {
  FlagsContinuation cont =
      FlagsContinuation::ForSet(kSignedLessThanOrEqual, node);
  VisitWord32Compare(this, node, &cont);
}

void InstructionSelector::VisitUint32LessThan(Node* node) {
  FlagsContinuation cont = FlagsContinuation::ForSet(kUnsignedLessThan, node);
  VisitWord32Compare(this, node, &cont);
}

void InstructionSelector::VisitUint32LessThanOrEqual(Node* node) {
  FlagsContinuation cont =
      FlagsContinuation::ForSet(kUnsignedLessThanOrEqual, node);
  VisitWord32Compare(this, node, &cont);
}

#if V8_TARGET_ARCH_S390X
void InstructionSelector::VisitWord64Equal(Node* const node) {
  FlagsContinuation cont = FlagsContinuation::ForSet(kEqual, node);
  Int64BinopMatcher m(node);
  if (m.right().Is(0)) {
    return VisitLoadAndTest(this, kS390_LoadAndTestWord64, m.node(),
                            m.left().node(), &cont, true);
  }
  VisitWord64Compare(this, node, &cont);
}

void InstructionSelector::VisitInt64LessThan(Node* node) {
  FlagsContinuation cont = FlagsContinuation::ForSet(kSignedLessThan, node);
  VisitWord64Compare(this, node, &cont);
}

void InstructionSelector::VisitInt64LessThanOrEqual(Node* node) {
  FlagsContinuation cont =
      FlagsContinuation::ForSet(kSignedLessThanOrEqual, node);
  VisitWord64Compare(this, node, &cont);
}

void InstructionSelector::VisitUint64LessThan(Node* node) {
  FlagsContinuation cont = FlagsContinuation::ForSet(kUnsignedLessThan, node);
  VisitWord64Compare(this, node, &cont);
}

void InstructionSelector::VisitUint64LessThanOrEqual(Node* node) {
  FlagsContinuation cont =
      FlagsContinuation::ForSet(kUnsignedLessThanOrEqual, node);
  VisitWord64Compare(this, node, &cont);
}
#endif

void InstructionSelector::VisitFloat32Equal(Node* node) {
  FlagsContinuation cont = FlagsContinuation::ForSet(kEqual, node);
  VisitFloat32Compare(this, node, &cont);
}

void InstructionSelector::VisitFloat32LessThan(Node* node) {
  FlagsContinuation cont = FlagsContinuation::ForSet(kUnsignedLessThan, node);
  VisitFloat32Compare(this, node, &cont);
}

void InstructionSelector::VisitFloat32LessThanOrEqual(Node* node) {
  FlagsContinuation cont =
      FlagsContinuation::ForSet(kUnsignedLessThanOrEqual, node);
  VisitFloat32Compare(this, node, &cont);
}

void InstructionSelector::VisitFloat64Equal(Node* node) {
  FlagsContinuation cont = FlagsContinuation::ForSet(kEqual, node);
  VisitFloat64Compare(this, node, &cont);
}

void InstructionSelector::VisitFloat64LessThan(Node* node) {
  FlagsContinuation cont = FlagsContinuation::ForSet(kUnsignedLessThan, node);
  VisitFloat64Compare(this, node, &cont);
}

void InstructionSelector::VisitFloat64LessThanOrEqual(Node* node) {
  FlagsContinuation cont =
      FlagsContinuation::ForSet(kUnsignedLessThanOrEqual, node);
  VisitFloat64Compare(this, node, &cont);
}
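
// For C calls, arguments are stored directly into the caller-reserved
// parameter slots; for all other calls, stack space is claimed first
// (kS390_StackClaim) and every argument is then stored, with float64 values
// occupying kDoubleSize / kPointerSize slots.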
void InstructionSelector::EmitPrepareArguments(
    ZoneVector<PushParameter>* arguments,
    const CallDescriptor* call_descriptor, Node* node) {
  S390OperandGenerator g(this);

  if (call_descriptor->IsCFunctionCall()) {
    Emit(kArchPrepareCallCFunction | MiscField::encode(static_cast<int>(
                                         call_descriptor->ParameterCount())),
         0, nullptr, 0, nullptr);

    // Poke any stack arguments.
    int slot = kStackFrameExtraParamSlot;
    for (PushParameter input : (*arguments)) {
      if (input.node == nullptr) continue;
      Emit(kS390_StoreToStackSlot, g.NoOutput(), g.UseRegister(input.node),
           g.TempImmediate(slot));
      ++slot;
    }
  } else {
    int num_slots = 0;
    int slot = 0;
    for (PushParameter input : *arguments) {
      if (input.node == nullptr) continue;
      num_slots += input.location.GetType().representation() ==
                           MachineRepresentation::kFloat64
                       ? kDoubleSize / kPointerSize
                       : 1;
    }
    Emit(kS390_StackClaim, g.NoOutput(), g.TempImmediate(num_slots));
    for (PushParameter input : *arguments) {
      Emit(kS390_StoreToStackSlot, g.NoOutput(), g.UseRegister(input.node),
           g.TempImmediate(slot));
      slot += input.location.GetType().representation() ==
                      MachineRepresentation::kFloat64
                  ? (kDoubleSize / kPointerSize)
                  : 1;
    }
    DCHECK(num_slots == slot);
  }
}

bool InstructionSelector::IsTailCallAddressImmediate() { return false; }

int InstructionSelector::GetTempsCountForTailCallFromJSFunction() { return 3; }
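
// Atomic loads are only expected for 8-, 16- and 32-bit word representations;
// atomic stores reuse the general store lowering.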
void InstructionSelector::VisitWord32AtomicLoad(Node* node) {
  LoadRepresentation load_rep = LoadRepresentationOf(node->op());
  DCHECK(load_rep.representation() == MachineRepresentation::kWord8 ||
         load_rep.representation() == MachineRepresentation::kWord16 ||
         load_rep.representation() == MachineRepresentation::kWord32);

void InstructionSelector::VisitWord32AtomicStore(Node* node) {
  MachineRepresentation rep = AtomicStoreRepresentationOf(node->op());
  VisitGeneralStore(this, node, rep);
}
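
// Shared helper: emits an atomic exchange with base + index addressing.
// All operands are placed in unique registers and the result is a plain
// register output.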
void VisitAtomicExchange(InstructionSelector* selector, Node* node,
                         ArchOpcode opcode) {
  S390OperandGenerator g(selector);
  Node* base = node->InputAt(0);
  Node* index = node->InputAt(1);
  Node* value = node->InputAt(2);

  AddressingMode addressing_mode = kMode_MRR;
  InstructionOperand inputs[3];
  size_t input_count = 0;
  inputs[input_count++] = g.UseUniqueRegister(base);
  inputs[input_count++] = g.UseUniqueRegister(index);
  inputs[input_count++] = g.UseUniqueRegister(value);
  InstructionOperand outputs[1];
  outputs[0] = g.DefineAsRegister(node);
  InstructionCode code = opcode | AddressingModeField::encode(addressing_mode);
  selector->Emit(code, 1, outputs, input_count, inputs);
}

void InstructionSelector::VisitWord32AtomicExchange(Node* node) {
  ArchOpcode opcode = kArchNop;
  MachineType type = AtomicOpType(node->op());
  if (type == MachineType::Int8()) {
    opcode = kWord32AtomicExchangeInt8;
  } else if (type == MachineType::Uint8()) {
    opcode = kWord32AtomicExchangeUint8;
  } else if (type == MachineType::Int16()) {
    opcode = kWord32AtomicExchangeInt16;
  } else if (type == MachineType::Uint16()) {
    opcode = kWord32AtomicExchangeUint16;
  } else if (type == MachineType::Int32() || type == MachineType::Uint32()) {
    opcode = kWord32AtomicExchangeWord32;
  }
  VisitAtomicExchange(this, node, opcode);
}

void InstructionSelector::VisitWord64AtomicExchange(Node* node) {
  ArchOpcode opcode = kArchNop;
  MachineType type = AtomicOpType(node->op());
  if (type == MachineType::Uint8()) {
    opcode = kS390_Word64AtomicExchangeUint8;
  } else if (type == MachineType::Uint16()) {
    opcode = kS390_Word64AtomicExchangeUint16;
  } else if (type == MachineType::Uint32()) {
    opcode = kS390_Word64AtomicExchangeUint32;
  } else if (type == MachineType::Uint64()) {
    opcode = kS390_Word64AtomicExchangeUint64;
  }
  VisitAtomicExchange(this, node, opcode);
}
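
// Shared helper: emits a compare-and-exchange. The expected (old) value is
// the first input and the output is defined same-as-first; the index may be
// encoded as a 20-bit immediate displacement.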
void VisitAtomicCompareExchange(InstructionSelector* selector, Node* node,
                                ArchOpcode opcode) {
  S390OperandGenerator g(selector);
  Node* base = node->InputAt(0);
  Node* index = node->InputAt(1);
  Node* old_value = node->InputAt(2);
  Node* new_value = node->InputAt(3);

  InstructionOperand inputs[4];
  size_t input_count = 0;
  inputs[input_count++] = g.UseUniqueRegister(old_value);
  inputs[input_count++] = g.UseUniqueRegister(new_value);
  inputs[input_count++] = g.UseUniqueRegister(base);

  AddressingMode addressing_mode;
  if (g.CanBeImmediate(index, OperandMode::kInt20Imm)) {
    inputs[input_count++] = g.UseImmediate(index);
    addressing_mode = kMode_MRI;
  } else {
    inputs[input_count++] = g.UseUniqueRegister(index);
    addressing_mode = kMode_MRR;
  }

  InstructionOperand outputs[1];
  size_t output_count = 0;
  outputs[output_count++] = g.DefineSameAsFirst(node);

  InstructionCode code = opcode | AddressingModeField::encode(addressing_mode);
  selector->Emit(code, output_count, outputs, input_count, inputs);
}

void InstructionSelector::VisitWord32AtomicCompareExchange(Node* node) {
  MachineType type = AtomicOpType(node->op());
  ArchOpcode opcode = kArchNop;
  if (type == MachineType::Int8()) {
    opcode = kWord32AtomicCompareExchangeInt8;
  } else if (type == MachineType::Uint8()) {
    opcode = kWord32AtomicCompareExchangeUint8;
  } else if (type == MachineType::Int16()) {
    opcode = kWord32AtomicCompareExchangeInt16;
  } else if (type == MachineType::Uint16()) {
    opcode = kWord32AtomicCompareExchangeUint16;
  } else if (type == MachineType::Int32() || type == MachineType::Uint32()) {
    opcode = kWord32AtomicCompareExchangeWord32;
  }
  VisitAtomicCompareExchange(this, node, opcode);
}

void InstructionSelector::VisitWord64AtomicCompareExchange(Node* node) {
  MachineType type = AtomicOpType(node->op());
  ArchOpcode opcode = kArchNop;
  if (type == MachineType::Uint8()) {
    opcode = kS390_Word64AtomicCompareExchangeUint8;
  } else if (type == MachineType::Uint16()) {
    opcode = kS390_Word64AtomicCompareExchangeUint16;
  } else if (type == MachineType::Uint32()) {
    opcode = kS390_Word64AtomicCompareExchangeUint32;
  } else if (type == MachineType::Uint64()) {
    opcode = kS390_Word64AtomicCompareExchangeUint64;
  }
  VisitAtomicCompareExchange(this, node, opcode);
}
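
// Shared helper: emits an atomic read-modify-write operation. The index may
// be an immediate displacement, and one temporary register is reserved for
// the code generator.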
void VisitAtomicBinop(InstructionSelector* selector, Node* node,
                      ArchOpcode opcode) {
  S390OperandGenerator g(selector);
  Node* base = node->InputAt(0);
  Node* index = node->InputAt(1);
  Node* value = node->InputAt(2);

  InstructionOperand inputs[3];
  size_t input_count = 0;
  inputs[input_count++] = g.UseUniqueRegister(base);

  AddressingMode addressing_mode;
  if (g.CanBeImmediate(index, OperandMode::kInt20Imm)) {
    inputs[input_count++] = g.UseImmediate(index);
    addressing_mode = kMode_MRI;
  } else {
    inputs[input_count++] = g.UseUniqueRegister(index);
    addressing_mode = kMode_MRR;
  }

  inputs[input_count++] = g.UseUniqueRegister(value);

  InstructionOperand outputs[1];
  size_t output_count = 0;
  outputs[output_count++] = g.DefineAsRegister(node);

  InstructionOperand temps[1];
  size_t temp_count = 0;
  temps[temp_count++] = g.TempRegister();

  InstructionCode code = opcode | AddressingModeField::encode(addressing_mode);
  selector->Emit(code, output_count, outputs, input_count, inputs, temp_count,
                 temps);
}

void InstructionSelector::VisitWord32AtomicBinaryOperation(
    Node* node, ArchOpcode int8_op, ArchOpcode uint8_op, ArchOpcode int16_op,
    ArchOpcode uint16_op, ArchOpcode word32_op) {
  MachineType type = AtomicOpType(node->op());
  ArchOpcode opcode = kArchNop;

  if (type == MachineType::Int8()) {
    opcode = int8_op;
  } else if (type == MachineType::Uint8()) {
    opcode = uint8_op;
  } else if (type == MachineType::Int16()) {
    opcode = int16_op;
  } else if (type == MachineType::Uint16()) {
    opcode = uint16_op;
  } else if (type == MachineType::Int32() || type == MachineType::Uint32()) {
    opcode = word32_op;
  }
  VisitAtomicBinop(this, node, opcode);
}
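
// The individual Word32 atomic visitors (Add, Sub, And, Or, Xor) are
// generated by this macro and dispatch on the atomic access type.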
#define VISIT_ATOMIC_BINOP(op)                                   \
  void InstructionSelector::VisitWord32Atomic##op(Node* node) {  \
    VisitWord32AtomicBinaryOperation(                            \
        node, kWord32Atomic##op##Int8, kWord32Atomic##op##Uint8, \
        kWord32Atomic##op##Int16, kWord32Atomic##op##Uint16,     \
        kWord32Atomic##op##Word32);                              \
  }
VISIT_ATOMIC_BINOP(Add)
VISIT_ATOMIC_BINOP(Sub)
VISIT_ATOMIC_BINOP(And)
VISIT_ATOMIC_BINOP(Or)
VISIT_ATOMIC_BINOP(Xor)
#undef VISIT_ATOMIC_BINOP

void InstructionSelector::VisitWord64AtomicBinaryOperation(
    Node* node, ArchOpcode uint8_op, ArchOpcode uint16_op, ArchOpcode word32_op,
    ArchOpcode word64_op) {
  MachineType type = AtomicOpType(node->op());
  ArchOpcode opcode = kArchNop;

  if (type == MachineType::Uint8()) {
    opcode = uint8_op;
  } else if (type == MachineType::Uint16()) {
    opcode = uint16_op;
  } else if (type == MachineType::Uint32()) {
    opcode = word32_op;
  } else if (type == MachineType::Uint64()) {
    opcode = word64_op;
  }
  VisitAtomicBinop(this, node, opcode);
}

#define VISIT_ATOMIC64_BINOP(op)                                             \
  void InstructionSelector::VisitWord64Atomic##op(Node* node) {              \
    VisitWord64AtomicBinaryOperation(                                        \
        node, kS390_Word64Atomic##op##Uint8, kS390_Word64Atomic##op##Uint16, \
        kS390_Word64Atomic##op##Uint32, kS390_Word64Atomic##op##Uint64);     \
  }
VISIT_ATOMIC64_BINOP(Add)
VISIT_ATOMIC64_BINOP(Sub)
VISIT_ATOMIC64_BINOP(And)
VISIT_ATOMIC64_BINOP(Or)
VISIT_ATOMIC64_BINOP(Xor)
#undef VISIT_ATOMIC64_BINOP

void InstructionSelector::VisitWord64AtomicLoad(Node* node) {
  LoadRepresentation load_rep = LoadRepresentationOf(node->op());

void InstructionSelector::VisitWord64AtomicStore(Node* node) {
  MachineRepresentation rep = AtomicStoreRepresentationOf(node->op());
  VisitGeneralStore(this, node, rep);
}
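
// SIMD (128-bit) operations are not implemented for S390 yet.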
void InstructionSelector::VisitI32x4Splat(Node* node) { UNIMPLEMENTED(); }
void InstructionSelector::VisitI32x4ExtractLane(Node* node) { UNIMPLEMENTED(); }
void InstructionSelector::VisitI32x4ReplaceLane(Node* node) { UNIMPLEMENTED(); }
void InstructionSelector::VisitI32x4Add(Node* node) { UNIMPLEMENTED(); }
void InstructionSelector::VisitI32x4Sub(Node* node) { UNIMPLEMENTED(); }
void InstructionSelector::VisitI32x4Shl(Node* node) { UNIMPLEMENTED(); }
void InstructionSelector::VisitI32x4ShrS(Node* node) { UNIMPLEMENTED(); }
void InstructionSelector::VisitI32x4Mul(Node* node) { UNIMPLEMENTED(); }
void InstructionSelector::VisitI32x4MaxS(Node* node) { UNIMPLEMENTED(); }
void InstructionSelector::VisitI32x4MinS(Node* node) { UNIMPLEMENTED(); }
void InstructionSelector::VisitI32x4Eq(Node* node) { UNIMPLEMENTED(); }
void InstructionSelector::VisitI32x4Ne(Node* node) { UNIMPLEMENTED(); }
void InstructionSelector::VisitI32x4MinU(Node* node) { UNIMPLEMENTED(); }
void InstructionSelector::VisitI32x4MaxU(Node* node) { UNIMPLEMENTED(); }
void InstructionSelector::VisitI32x4ShrU(Node* node) { UNIMPLEMENTED(); }
void InstructionSelector::VisitI32x4Neg(Node* node) { UNIMPLEMENTED(); }
void InstructionSelector::VisitI32x4GtS(Node* node) { UNIMPLEMENTED(); }
void InstructionSelector::VisitI32x4GeS(Node* node) { UNIMPLEMENTED(); }
void InstructionSelector::VisitI32x4GtU(Node* node) { UNIMPLEMENTED(); }
void InstructionSelector::VisitI32x4GeU(Node* node) { UNIMPLEMENTED(); }
void InstructionSelector::VisitI16x8Splat(Node* node) { UNIMPLEMENTED(); }
void InstructionSelector::VisitI16x8ExtractLane(Node* node) { UNIMPLEMENTED(); }
void InstructionSelector::VisitI16x8ReplaceLane(Node* node) { UNIMPLEMENTED(); }
void InstructionSelector::VisitI16x8Shl(Node* node) { UNIMPLEMENTED(); }
void InstructionSelector::VisitI16x8ShrS(Node* node) { UNIMPLEMENTED(); }
void InstructionSelector::VisitI16x8ShrU(Node* node) { UNIMPLEMENTED(); }
void InstructionSelector::VisitI16x8Add(Node* node) { UNIMPLEMENTED(); }
void InstructionSelector::VisitI16x8AddSaturateS(Node* node) { UNIMPLEMENTED(); }
void InstructionSelector::VisitI16x8Sub(Node* node) { UNIMPLEMENTED(); }
void InstructionSelector::VisitI16x8SubSaturateS(Node* node) { UNIMPLEMENTED(); }
void InstructionSelector::VisitI16x8Mul(Node* node) { UNIMPLEMENTED(); }
void InstructionSelector::VisitI16x8MinS(Node* node) { UNIMPLEMENTED(); }
void InstructionSelector::VisitI16x8MaxS(Node* node) { UNIMPLEMENTED(); }
void InstructionSelector::VisitI16x8Eq(Node* node) { UNIMPLEMENTED(); }
void InstructionSelector::VisitI16x8Ne(Node* node) { UNIMPLEMENTED(); }
void InstructionSelector::VisitI16x8AddSaturateU(Node* node) { UNIMPLEMENTED(); }
void InstructionSelector::VisitI16x8SubSaturateU(Node* node) { UNIMPLEMENTED(); }
void InstructionSelector::VisitI16x8MinU(Node* node) { UNIMPLEMENTED(); }
void InstructionSelector::VisitI16x8MaxU(Node* node) { UNIMPLEMENTED(); }
void InstructionSelector::VisitI16x8Neg(Node* node) { UNIMPLEMENTED(); }
void InstructionSelector::VisitI16x8GtS(Node* node) { UNIMPLEMENTED(); }
void InstructionSelector::VisitI16x8GeS(Node* node) { UNIMPLEMENTED(); }
void InstructionSelector::VisitI16x8GtU(Node* node) { UNIMPLEMENTED(); }
void InstructionSelector::VisitI16x8GeU(Node* node) { UNIMPLEMENTED(); }
void InstructionSelector::VisitI8x16Neg(Node* node) { UNIMPLEMENTED(); }
void InstructionSelector::VisitI8x16Splat(Node* node) { UNIMPLEMENTED(); }
void InstructionSelector::VisitI8x16ExtractLane(Node* node) { UNIMPLEMENTED(); }
void InstructionSelector::VisitI8x16ReplaceLane(Node* node) { UNIMPLEMENTED(); }
void InstructionSelector::VisitI8x16Add(Node* node) { UNIMPLEMENTED(); }
void InstructionSelector::VisitI8x16AddSaturateS(Node* node) { UNIMPLEMENTED(); }
void InstructionSelector::VisitI8x16Sub(Node* node) { UNIMPLEMENTED(); }
void InstructionSelector::VisitI8x16SubSaturateS(Node* node) { UNIMPLEMENTED(); }
void InstructionSelector::VisitI8x16MinS(Node* node) { UNIMPLEMENTED(); }
void InstructionSelector::VisitI8x16MaxS(Node* node) { UNIMPLEMENTED(); }
void InstructionSelector::VisitI8x16Eq(Node* node) { UNIMPLEMENTED(); }
void InstructionSelector::VisitI8x16Ne(Node* node) { UNIMPLEMENTED(); }
void InstructionSelector::VisitI8x16GtS(Node* node) { UNIMPLEMENTED(); }
void InstructionSelector::VisitI8x16GeS(Node* node) { UNIMPLEMENTED(); }
void InstructionSelector::VisitI8x16AddSaturateU(Node* node) { UNIMPLEMENTED(); }
void InstructionSelector::VisitI8x16SubSaturateU(Node* node) { UNIMPLEMENTED(); }
void InstructionSelector::VisitI8x16MinU(Node* node) { UNIMPLEMENTED(); }
void InstructionSelector::VisitI8x16MaxU(Node* node) { UNIMPLEMENTED(); }
void InstructionSelector::VisitI8x16GtU(Node* node) { UNIMPLEMENTED(); }
void InstructionSelector::VisitI8x16GeU(Node* node) { UNIMPLEMENTED(); }
void InstructionSelector::VisitS128And(Node* node) { UNIMPLEMENTED(); }
void InstructionSelector::VisitS128Or(Node* node) { UNIMPLEMENTED(); }
void InstructionSelector::VisitS128Xor(Node* node) { UNIMPLEMENTED(); }
void InstructionSelector::VisitS128Not(Node* node) { UNIMPLEMENTED(); }
void InstructionSelector::VisitS128Zero(Node* node) { UNIMPLEMENTED(); }
void InstructionSelector::VisitF32x4Eq(Node* node) { UNIMPLEMENTED(); }
void InstructionSelector::VisitF32x4Ne(Node* node) { UNIMPLEMENTED(); }
void InstructionSelector::VisitF32x4Lt(Node* node) { UNIMPLEMENTED(); }
void InstructionSelector::VisitF32x4Le(Node* node) { UNIMPLEMENTED(); }
void InstructionSelector::VisitF32x4Splat(Node* node) { UNIMPLEMENTED(); }
void InstructionSelector::VisitF32x4ExtractLane(Node* node) { UNIMPLEMENTED(); }
void InstructionSelector::VisitF32x4ReplaceLane(Node* node) { UNIMPLEMENTED(); }

void InstructionSelector::EmitPrepareResults(
    ZoneVector<PushParameter>* results, const CallDescriptor* call_descriptor,
    Node* node) {

void InstructionSelector::VisitF32x4Add(Node* node) { UNIMPLEMENTED(); }
void InstructionSelector::VisitF32x4Sub(Node* node) { UNIMPLEMENTED(); }
void InstructionSelector::VisitF32x4Mul(Node* node) { UNIMPLEMENTED(); }
void InstructionSelector::VisitF32x4Min(Node* node) { UNIMPLEMENTED(); }
void InstructionSelector::VisitF32x4Max(Node* node) { UNIMPLEMENTED(); }
void InstructionSelector::VisitS128Select(Node* node) { UNIMPLEMENTED(); }
void InstructionSelector::VisitF32x4Neg(Node* node) { UNIMPLEMENTED(); }
void InstructionSelector::VisitF32x4Abs(Node* node) { UNIMPLEMENTED(); }
void InstructionSelector::VisitF32x4RecipSqrtApprox(Node* node) { UNIMPLEMENTED(); }
void InstructionSelector::VisitF32x4RecipApprox(Node* node) { UNIMPLEMENTED(); }
void InstructionSelector::VisitF32x4AddHoriz(Node* node) { UNIMPLEMENTED(); }
void InstructionSelector::VisitI32x4AddHoriz(Node* node) { UNIMPLEMENTED(); }
void InstructionSelector::VisitI16x8AddHoriz(Node* node) { UNIMPLEMENTED(); }
void InstructionSelector::VisitF32x4SConvertI32x4(Node* node) { UNIMPLEMENTED(); }
void InstructionSelector::VisitF32x4UConvertI32x4(Node* node) { UNIMPLEMENTED(); }
void InstructionSelector::VisitI32x4SConvertF32x4(Node* node) { UNIMPLEMENTED(); }
void InstructionSelector::VisitI32x4UConvertF32x4(Node* node) { UNIMPLEMENTED(); }
void InstructionSelector::VisitI32x4SConvertI16x8Low(Node* node) { UNIMPLEMENTED(); }
void InstructionSelector::VisitI32x4SConvertI16x8High(Node* node) { UNIMPLEMENTED(); }
void InstructionSelector::VisitI32x4UConvertI16x8Low(Node* node) { UNIMPLEMENTED(); }
void InstructionSelector::VisitI32x4UConvertI16x8High(Node* node) { UNIMPLEMENTED(); }
void InstructionSelector::VisitI16x8SConvertI8x16Low(Node* node) { UNIMPLEMENTED(); }
void InstructionSelector::VisitI16x8SConvertI8x16High(Node* node) { UNIMPLEMENTED(); }
void InstructionSelector::VisitI16x8UConvertI8x16Low(Node* node) { UNIMPLEMENTED(); }
void InstructionSelector::VisitI16x8UConvertI8x16High(Node* node) { UNIMPLEMENTED(); }
void InstructionSelector::VisitI16x8SConvertI32x4(Node* node) { UNIMPLEMENTED(); }
void InstructionSelector::VisitI16x8UConvertI32x4(Node* node) { UNIMPLEMENTED(); }
void InstructionSelector::VisitI8x16SConvertI16x8(Node* node) { UNIMPLEMENTED(); }
void InstructionSelector::VisitI8x16UConvertI16x8(Node* node) { UNIMPLEMENTED(); }
void InstructionSelector::VisitS1x4AnyTrue(Node* node) { UNIMPLEMENTED(); }
void InstructionSelector::VisitS1x4AllTrue(Node* node) { UNIMPLEMENTED(); }
void InstructionSelector::VisitS1x8AnyTrue(Node* node) { UNIMPLEMENTED(); }
void InstructionSelector::VisitS1x8AllTrue(Node* node) { UNIMPLEMENTED(); }
void InstructionSelector::VisitS1x16AnyTrue(Node* node) { UNIMPLEMENTED(); }
void InstructionSelector::VisitS1x16AllTrue(Node* node) { UNIMPLEMENTED(); }
void InstructionSelector::VisitI8x16Shl(Node* node) { UNIMPLEMENTED(); }
void InstructionSelector::VisitI8x16ShrS(Node* node) { UNIMPLEMENTED(); }
void InstructionSelector::VisitI8x16ShrU(Node* node) { UNIMPLEMENTED(); }
void InstructionSelector::VisitI8x16Mul(Node* node) { UNIMPLEMENTED(); }
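
// Optional machine operators supported natively by this backend:
// floating-point rounding modes, popcnt, and abs-with-overflow.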
MachineOperatorBuilder::Flags
InstructionSelector::SupportedMachineOperatorFlags() {
  return MachineOperatorBuilder::kFloat32RoundDown |
         MachineOperatorBuilder::kFloat64RoundDown |
         MachineOperatorBuilder::kFloat32RoundUp |
         MachineOperatorBuilder::kFloat64RoundUp |
         MachineOperatorBuilder::kFloat32RoundTruncate |
         MachineOperatorBuilder::kFloat64RoundTruncate |
         MachineOperatorBuilder::kFloat64RoundTiesAway |
         MachineOperatorBuilder::kWord32Popcnt |
         MachineOperatorBuilder::kInt32AbsWithOverflow |
         MachineOperatorBuilder::kInt64AbsWithOverflow |
         MachineOperatorBuilder::kWord64Popcnt;
}
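
// Unaligned loads and stores are supported for all machine types.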
MachineOperatorBuilder::AlignmentRequirements
InstructionSelector::AlignmentRequirements() {
  return MachineOperatorBuilder::AlignmentRequirements::
      FullUnalignedAccessSupport();