#include "src/compiler/backend/code-generator.h"

#include "src/assembler-inl.h"
#include "src/callable.h"
#include "src/compiler/backend/code-generator-impl.h"
#include "src/compiler/backend/gap-resolver.h"
#include "src/compiler/node-matchers.h"
#include "src/compiler/osr.h"
#include "src/macro-assembler.h"
#include "src/optimized-compilation-info.h"
#include "src/wasm/wasm-code-manager.h"
#include "src/wasm/wasm-objects.h"

namespace v8 {
namespace internal {
namespace compiler {

#define __ tasm()->

#define kScratchReg ip

// Adds S390-specific methods to convert InstructionOperands.
class S390OperandConverter final : public InstructionOperandConverter {
 public:
  S390OperandConverter(CodeGenerator* gen, Instruction* instr)
      : InstructionOperandConverter(gen, instr) {}

  size_t OutputCount() { return instr_->OutputCount(); }

  bool Is64BitOperand(int index) {
    return LocationOperand::cast(instr_->InputAt(index))->representation() ==
           MachineRepresentation::kWord64;
  }

  bool Is32BitOperand(int index) {
    return LocationOperand::cast(instr_->InputAt(index))->representation() ==
           MachineRepresentation::kWord32;
  }

  // A compare is "logical" (unsigned) when the instruction's flags condition
  // is one of the unsigned comparisons.
  bool CompareLogical() const {
    switch (instr_->flags_condition()) {
      case kUnsignedLessThan:
      case kUnsignedGreaterThanOrEqual:
      case kUnsignedLessThanOrEqual:
      case kUnsignedGreaterThan:
        return true;
      default:
        return false;
    }
  }
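  // InputImmediate materializes a constant instruction input as an assembler
  // Operand, dispatching on the constant's type; heap objects and RPO numbers
  // are deliberately not representable as immediates here.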
  Operand InputImmediate(size_t index) {
    Constant constant = ToConstant(instr_->InputAt(index));
    switch (constant.type()) {
      case Constant::kInt32:
        return Operand(constant.ToInt32());
      case Constant::kFloat32:
        return Operand::EmbeddedNumber(constant.ToFloat32());
      case Constant::kFloat64:
        return Operand::EmbeddedNumber(constant.ToFloat64().value());
      case Constant::kInt64:
#if V8_TARGET_ARCH_S390X
        return Operand(constant.ToInt64());
#endif
      case Constant::kExternalReference:
        return Operand(constant.ToExternalReference());
      case Constant::kDelayedStringConstant:
        return Operand::EmbeddedStringConstant(
            constant.ToDelayedStringConstant());
      case Constant::kHeapObject:
      case Constant::kRpoNumber:
        break;
    }
    UNREACHABLE();
  }
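  // MemoryOperand decodes the addressing mode encoded in the instruction's
  // opcode and consumes the corresponding inputs starting at *first_index:
  // one register (MR), register + displacement (MRI), register + register
  // (MRR), or register + register + displacement (MRRI).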
  MemOperand MemoryOperand(AddressingMode* mode, size_t* first_index) {
    const size_t index = *first_index;
    if (mode) *mode = AddressingModeField::decode(instr_->opcode());
    switch (AddressingModeField::decode(instr_->opcode())) {
      case kMode_None:
        break;
      case kMode_MR:
        *first_index += 1;
        return MemOperand(InputRegister(index + 0), 0);
      case kMode_MRI:
        *first_index += 2;
        return MemOperand(InputRegister(index + 0), InputInt32(index + 1));
      case kMode_MRR:
        *first_index += 2;
        return MemOperand(InputRegister(index + 0), InputRegister(index + 1));
      case kMode_MRRI:
        *first_index += 3;
        return MemOperand(InputRegister(index + 0), InputRegister(index + 1),
                          InputInt32(index + 2));
    }
    UNREACHABLE();
  }

  MemOperand MemoryOperand(AddressingMode* mode = nullptr,
                           size_t first_index = 0) {
    return MemoryOperand(mode, &first_index);
  }

  MemOperand ToMemOperand(InstructionOperand* op) const {
    DCHECK(op->IsStackSlot() || op->IsFPStackSlot());
    return SlotToMemOperand(AllocatedOperand::cast(op)->index());
  }

  MemOperand SlotToMemOperand(int slot) const {
    FrameOffset offset = frame_access_state()->GetFrameOffset(slot);
    return MemOperand(offset.from_stack_pointer() ? sp : fp, offset.offset());
  }

  MemOperand InputStackSlot(size_t index) {
    InstructionOperand* op = instr_->InputAt(index);
    return SlotToMemOperand(AllocatedOperand::cast(op)->index());
  }

  MemOperand InputStackSlot32(size_t index) {
#if V8_TARGET_ARCH_S390X && !V8_TARGET_LITTLE_ENDIAN
    // The high-order 4 bytes come first on big-endian targets, so step past
    // them to address the low 32 bits of the 64-bit slot.
    MemOperand mem = InputStackSlot(index);
    return MemOperand(mem.rb(), mem.rx(), mem.offset() + 4);
#else
    return InputStackSlot(index);
#endif
  }
};
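// The predicates below classify instruction operands so the assembly macros
// can pick the register, memory, or immediate form of an S390 instruction.
// Note that HasRegisterInput also admits FP registers, matching how the
// macros are instantiated with both GPR and FPR variants.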
static inline bool HasRegisterOutput(Instruction* instr, int index = 0) {
  return instr->OutputCount() > 0 && instr->OutputAt(index)->IsRegister();
}

static inline bool HasFPRegisterInput(Instruction* instr, int index) {
  return instr->InputAt(index)->IsFPRegister();
}

static inline bool HasRegisterInput(Instruction* instr, int index) {
  return instr->InputAt(index)->IsRegister() ||
         HasFPRegisterInput(instr, index);
}

static inline bool HasImmediateInput(Instruction* instr, size_t index) {
  return instr->InputAt(index)->IsImmediate();
}

static inline bool HasFPStackSlotInput(Instruction* instr, size_t index) {
  return instr->InputAt(index)->IsFPStackSlot();
}

static inline bool HasStackSlotInput(Instruction* instr, size_t index) {
  return instr->InputAt(index)->IsStackSlot() ||
         HasFPStackSlotInput(instr, index);
}
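// Emits the deferred slow path for kArchStoreWithWriteBarrier. The inline
// fast path only checks the page flags; this out-of-line code re-checks the
// stored value, computes the slot address into scratch1, and calls the
// record-write stub, saving r14 manually when no frame exists.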
class OutOfLineRecordWrite final : public OutOfLineCode {
 public:
  OutOfLineRecordWrite(CodeGenerator* gen, Register object, Register offset,
                       Register value, Register scratch0, Register scratch1,
                       RecordWriteMode mode, StubCallMode stub_mode)
      : OutOfLineCode(gen),
        object_(object),
        offset_(offset),
        offset_immediate_(0),
        value_(value),
        scratch0_(scratch0),
        scratch1_(scratch1),
        mode_(mode),
        stub_mode_(stub_mode),
        must_save_lr_(!gen->frame_access_state()->has_frame()),
        zone_(gen->zone()) {}

  OutOfLineRecordWrite(CodeGenerator* gen, Register object, int32_t offset,
                       Register value, Register scratch0, Register scratch1,
                       RecordWriteMode mode, StubCallMode stub_mode)
      : OutOfLineCode(gen),
        object_(object),
        offset_(no_reg),
        offset_immediate_(offset),
        value_(value),
        scratch0_(scratch0),
        scratch1_(scratch1),
        mode_(mode),
        stub_mode_(stub_mode),
        must_save_lr_(!gen->frame_access_state()->has_frame()),
        zone_(gen->zone()) {}

  void Generate() final {
    if (mode_ > RecordWriteMode::kValueIsPointer) {
      __ JumpIfSmi(value_, exit());
    }
    __ CheckPageFlag(value_, scratch0_,
                     MemoryChunk::kPointersToHereAreInterestingMask, eq,
                     exit());
    if (offset_ == no_reg) {
      __ AddP(scratch1_, object_, Operand(offset_immediate_));
    } else {
      DCHECK_EQ(0, offset_immediate_);
      __ AddP(scratch1_, object_, offset_);
    }
    RememberedSetAction const remembered_set_action =
        mode_ > RecordWriteMode::kValueIsMap ? EMIT_REMEMBERED_SET
                                             : OMIT_REMEMBERED_SET;
    SaveFPRegsMode const save_fp_mode =
        frame()->DidAllocateDoubleRegisters() ? kSaveFPRegs : kDontSaveFPRegs;
    if (must_save_lr_) {
      // We need to save and restore r14 if the frame was elided.
      __ Push(r14);
    }
    if (stub_mode_ == StubCallMode::kCallWasmRuntimeStub) {
      // A direct call to a wasm runtime stub defined in this module.
      __ CallRecordWriteStub(object_, scratch1_, remembered_set_action,
                             save_fp_mode, wasm::WasmCode::kWasmRecordWrite);
    } else {
      __ CallRecordWriteStub(object_, scratch1_, remembered_set_action,
                             save_fp_mode);
    }
    if (must_save_lr_) {
      __ Pop(r14);
    }
  }

 private:
  Register const object_;
  Register const offset_;           // Valid if offset_immediate_ == 0.
  int32_t const offset_immediate_;  // Valid if offset_ == no_reg.
  Register const value_;
  Register const scratch0_;
  Register const scratch1_;
  RecordWriteMode const mode_;
  StubCallMode stub_mode_;
  bool must_save_lr_;
  Zone* zone_;
};
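// Maps a compiler FlagsCondition to an S390 branch condition. The extra
// ArchOpcode parameter matters for load-and-test opcodes, where the compare
// is implicitly against zero, so the unsigned relations degenerate (for
// example "unsigned < 0" can never hold).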
Condition FlagsConditionToCondition(FlagsCondition condition, ArchOpcode op) {
  switch (condition) {
    case kEqual:
      return eq;
    case kNotEqual:
      return ne;
    case kUnsignedLessThan:
      // An unsigned value is never less than 0.
      if (op == kS390_LoadAndTestWord32 || op == kS390_LoadAndTestWord64)
        return CC_NOP;
      V8_FALLTHROUGH;
    case kSignedLessThan:
      return lt;
    case kUnsignedGreaterThanOrEqual:
      // An unsigned value is always greater than or equal to 0.
      if (op == kS390_LoadAndTestWord32 || op == kS390_LoadAndTestWord64)
        return CC_ALWAYS;
      V8_FALLTHROUGH;
    case kSignedGreaterThanOrEqual:
      return ge;
    case kUnsignedLessThanOrEqual:
      // An unsigned value is less than or equal to 0 only when it equals 0.
      if (op == kS390_LoadAndTestWord32 || op == kS390_LoadAndTestWord64)
        return CC_EQ;
      V8_FALLTHROUGH;
    case kSignedLessThanOrEqual:
      return le;
    case kUnsignedGreaterThan:
      // An unsigned value is greater than 0 whenever it is not 0.
      if (op == kS390_LoadAndTestWord32 || op == kS390_LoadAndTestWord64)
        return ne;
      V8_FALLTHROUGH;
    case kSignedGreaterThan:
      return gt;
    default:
      break;
  }
  UNREACHABLE();
}
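// The macros below build small lambdas that emit one flavor of an opcode:
// RR* forms take register operands, RM*/RRM* forms take a memory operand
// (resolved through GET_MEMOPERAND or its 32-bit variant), and RI*/RRI*
// forms take an immediate. Each lambda returns the index of the first
// unconsumed input, which the 32-bit wrappers use to locate the
// zero-extension flag.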
#define GET_MEMOPERAND32(ret, fi) \
  ([&](int& ret) { \
    AddressingMode mode = AddressingModeField::decode(instr->opcode()); \
    MemOperand mem(r0); \
    if (mode != kMode_None) { \
      size_t first_index = (fi); \
      mem = i.MemoryOperand(&mode, &first_index); \
      ret = first_index; \
    } else { \
      mem = i.InputStackSlot32(fi); \
    } \
    return mem; \
  })(ret)

#define GET_MEMOPERAND(ret, fi) \
  ([&](int& ret) { \
    AddressingMode mode = AddressingModeField::decode(instr->opcode()); \
    MemOperand mem(r0); \
    if (mode != kMode_None) { \
      size_t first_index = (fi); \
      mem = i.MemoryOperand(&mode, &first_index); \
      ret = first_index; \
    } else { \
      mem = i.InputStackSlot(fi); \
    } \
    return mem; \
  })(ret)

#define RRInstr(instr) \
  [&]() { \
    DCHECK(i.OutputRegister() == i.InputRegister(0)); \
    __ instr(i.OutputRegister(), i.InputRegister(1)); \
    return 2; \
  }

#define RIInstr(instr) \
  [&]() { \
    DCHECK(i.OutputRegister() == i.InputRegister(0)); \
    __ instr(i.OutputRegister(), i.InputImmediate(1)); \
    return 2; \
  }

#define RMInstr(instr, GETMEM) \
  [&]() { \
    DCHECK(i.OutputRegister() == i.InputRegister(0)); \
    int ret = 2; \
    __ instr(i.OutputRegister(), GETMEM(ret, 1)); \
    return ret; \
  }
#define RM32Instr(instr) RMInstr(instr, GET_MEMOPERAND32)
#define RM64Instr(instr) RMInstr(instr, GET_MEMOPERAND)

#define RRRInstr(instr) \
  [&]() { \
    __ instr(i.OutputRegister(), i.InputRegister(0), i.InputRegister(1)); \
    return 2; \
  }

#define RRIInstr(instr) \
  [&]() { \
    __ instr(i.OutputRegister(), i.InputRegister(0), i.InputImmediate(1)); \
    return 2; \
  }

#define RRMInstr(instr, GETMEM) \
  [&]() { \
    int ret = 2; \
    __ instr(i.OutputRegister(), i.InputRegister(0), GETMEM(ret, 1)); \
    return ret; \
  }
#define RRM32Instr(instr) RRMInstr(instr, GET_MEMOPERAND32)
#define RRM64Instr(instr) RRMInstr(instr, GET_MEMOPERAND)

#define DDInstr(instr) \
  [&]() { \
    DCHECK(i.OutputDoubleRegister() == i.InputDoubleRegister(0)); \
    __ instr(i.OutputDoubleRegister(), i.InputDoubleRegister(1)); \
    return 2; \
  }

#define DMInstr(instr) \
  [&]() { \
    DCHECK(i.OutputDoubleRegister() == i.InputDoubleRegister(0)); \
    int ret = 2; \
    __ instr(i.OutputDoubleRegister(), GET_MEMOPERAND(ret, 1)); \
    return ret; \
  }

#define DMTInstr(instr) \
  [&]() { \
    DCHECK(i.OutputDoubleRegister() == i.InputDoubleRegister(0)); \
    int ret = 2; \
    __ instr(i.OutputDoubleRegister(), GET_MEMOPERAND(ret, 1), \
             kScratchDoubleReg); \
    return ret; \
  }

#define R_MInstr(instr) \
  [&]() { \
    int ret = 2; \
    __ instr(i.OutputRegister(), GET_MEMOPERAND(ret, 0)); \
    return ret; \
  }

#define R_DInstr(instr) \
  [&]() { \
    __ instr(i.OutputRegister(), i.InputDoubleRegister(0)); \
    return 1; \
  }

#define D_DInstr(instr) \
  [&]() { \
    __ instr(i.OutputDoubleRegister(), i.InputDoubleRegister(0)); \
    return 1; \
  }

#define D_MInstr(instr) \
  [&]() { \
    int ret = 2; \
    __ instr(i.OutputDoubleRegister(), GET_MEMOPERAND(ret, 0)); \
    return ret; \
  }

#define D_MTInstr(instr) \
  [&]() { \
    int ret = 2; \
    __ instr(i.OutputDoubleRegister(), GET_MEMOPERAND(ret, 0), \
             kScratchDoubleReg); \
    return ret; \
  }

static int nullInstr() { UNREACHABLE(); }
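// AssembleOp (below) picks which of the three lambdas to invoke by
// inspecting the last operand input: a decoded addressing mode or a stack
// slot selects the memory form, a register input the register form, and an
// immediate the immediate form. A typical instantiation for a 32-bit AND is
//   ASSEMBLE_BIN32_OP(RRRInstr(nrk), RM32Instr(And), RIInstr(nilf));
// (see kS390_And32 further down).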
template <int numOfOperand, class RType, class MType, class IType>
static inline int AssembleOp(Instruction* instr, RType r, MType m, IType i) {
  AddressingMode mode = AddressingModeField::decode(instr->opcode());
  if (mode != kMode_None || HasStackSlotInput(instr, numOfOperand - 1)) {
    return m();
  } else if (HasRegisterInput(instr, numOfOperand - 1)) {
    return r();
  } else if (HasImmediateInput(instr, numOfOperand - 1)) {
    return i();
  } else {
    UNREACHABLE();
  }
}

template <class _RR, class _RM, class _RI>
static inline int AssembleBinOp(Instruction* instr, _RR _rr, _RM _rm,
                                _RI _ri) {
  return AssembleOp<2>(instr, _rr, _rm, _ri);
}

template <class _R, class _M, class _I>
static inline int AssembleUnaryOp(Instruction* instr, _R _r, _M _m, _I _i) {
  return AssembleOp<1>(instr, _r, _m, _i);
}
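// On 64-bit targets, 32-bit operations leave the upper word of the result
// unspecified, so the instruction selector appends an immediate "doZeroExt"
// flag input. ASSEMBLE_BIN32_OP feeds the index returned by AssembleBinOp
// into CHECK_AND_ZERO_EXT_OUTPUT, which zero-extends the output via LoadlW
// when the flag is set. On 32-bit targets both macros collapse to the plain
// binop.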
#define ASSEMBLE_BIN_OP(_rr, _rm, _ri) AssembleBinOp(instr, _rr, _rm, _ri)
#define ASSEMBLE_UNARY_OP(_r, _m, _i) AssembleUnaryOp(instr, _r, _m, _i)

#ifdef V8_TARGET_ARCH_S390X
#define CHECK_AND_ZERO_EXT_OUTPUT(num) \
  { \
    int index = (num); \
    DCHECK(HasImmediateInput(instr, (index))); \
    int doZeroExt = i.InputInt32(index); \
    if (doZeroExt) __ LoadlW(i.OutputRegister(), i.OutputRegister()); \
  }

#define ASSEMBLE_BIN32_OP(_rr, _rm, _ri) \
  { CHECK_AND_ZERO_EXT_OUTPUT(AssembleBinOp(instr, _rr, _rm, _ri)); }
#else
#define ASSEMBLE_BIN32_OP ASSEMBLE_BIN_OP
#define CHECK_AND_ZERO_EXT_OUTPUT(num)
#endif

#define ASSEMBLE_FLOAT_UNOP(asm_instr) \
  { __ asm_instr(i.OutputDoubleRegister(), i.InputDoubleRegister(0)); }

#define ASSEMBLE_FLOAT_BINOP(asm_instr) \
  { \
    __ asm_instr(i.OutputDoubleRegister(), i.InputDoubleRegister(0), \
                 i.InputDoubleRegister(1)); \
  }

#define ASSEMBLE_COMPARE(cmp_instr, cmpl_instr) \
  { \
    AddressingMode mode = AddressingModeField::decode(instr->opcode()); \
    if (mode != kMode_None) { \
      size_t first_index = 1; \
      MemOperand operand = i.MemoryOperand(&mode, &first_index); \
      if (i.CompareLogical()) { \
        __ cmpl_instr(i.InputRegister(0), operand); \
      } else { \
        __ cmp_instr(i.InputRegister(0), operand); \
      } \
    } else if (HasRegisterInput(instr, 1)) { \
      if (i.CompareLogical()) { \
        __ cmpl_instr(i.InputRegister(0), i.InputRegister(1)); \
      } else { \
        __ cmp_instr(i.InputRegister(0), i.InputRegister(1)); \
      } \
    } else if (HasImmediateInput(instr, 1)) { \
      if (i.CompareLogical()) { \
        __ cmpl_instr(i.InputRegister(0), i.InputImmediate(1)); \
      } else { \
        __ cmp_instr(i.InputRegister(0), i.InputImmediate(1)); \
      } \
    } else { \
      DCHECK(HasStackSlotInput(instr, 1)); \
      if (i.CompareLogical()) { \
        __ cmpl_instr(i.InputRegister(0), i.InputStackSlot(1)); \
      } else { \
        __ cmp_instr(i.InputRegister(0), i.InputStackSlot(1)); \
      } \
    } \
  }

#define ASSEMBLE_COMPARE32(cmp_instr, cmpl_instr) \
  { \
    AddressingMode mode = AddressingModeField::decode(instr->opcode()); \
    if (mode != kMode_None) { \
      size_t first_index = 1; \
      MemOperand operand = i.MemoryOperand(&mode, &first_index); \
      if (i.CompareLogical()) { \
        __ cmpl_instr(i.InputRegister(0), operand); \
      } else { \
        __ cmp_instr(i.InputRegister(0), operand); \
      } \
    } else if (HasRegisterInput(instr, 1)) { \
      if (i.CompareLogical()) { \
        __ cmpl_instr(i.InputRegister(0), i.InputRegister(1)); \
      } else { \
        __ cmp_instr(i.InputRegister(0), i.InputRegister(1)); \
      } \
    } else if (HasImmediateInput(instr, 1)) { \
      if (i.CompareLogical()) { \
        __ cmpl_instr(i.InputRegister(0), i.InputImmediate(1)); \
      } else { \
        __ cmp_instr(i.InputRegister(0), i.InputImmediate(1)); \
      } \
    } else { \
      DCHECK(HasStackSlotInput(instr, 1)); \
      if (i.CompareLogical()) { \
        __ cmpl_instr(i.InputRegister(0), i.InputStackSlot32(1)); \
      } else { \
        __ cmp_instr(i.InputRegister(0), i.InputStackSlot32(1)); \
      } \
    } \
  }

#define ASSEMBLE_FLOAT_COMPARE(cmp_rr_instr, cmp_rm_instr, load_instr) \
  { \
    AddressingMode mode = AddressingModeField::decode(instr->opcode()); \
    if (mode != kMode_None) { \
      size_t first_index = 1; \
      MemOperand operand = i.MemoryOperand(&mode, &first_index); \
      __ cmp_rm_instr(i.InputDoubleRegister(0), operand); \
    } else if (HasFPRegisterInput(instr, 1)) { \
      __ cmp_rr_instr(i.InputDoubleRegister(0), i.InputDoubleRegister(1)); \
    } else { \
      USE(HasFPStackSlotInput); \
      DCHECK(HasFPStackSlotInput(instr, 1)); \
      MemOperand operand = i.InputStackSlot(1); \
      if (operand.offset() >= 0) { \
        __ cmp_rm_instr(i.InputDoubleRegister(0), operand); \
      } else { \
        __ load_instr(kScratchDoubleReg, operand); \
        __ cmp_rr_instr(i.InputDoubleRegister(0), kScratchDoubleReg); \
      } \
    } \
  }

#define ASSEMBLE_MODULO(div_instr, shift_instr) \
  { \
    __ LoadRR(r0, i.InputRegister(0)); \
    __ shift_instr(r0, Operand(32)); \
    __ div_instr(r0, i.InputRegister(1)); \
    __ LoadlW(i.OutputRegister(), r0); \
  }

#define ASSEMBLE_FLOAT_MODULO() \
  { \
    FrameScope scope(tasm(), StackFrame::MANUAL); \
    __ PrepareCallCFunction(0, 2, kScratchReg); \
    __ MovToFloatParameters(i.InputDoubleRegister(0), \
                            i.InputDoubleRegister(1)); \
    __ CallCFunction(ExternalReference::mod_two_doubles_operation(), 0, 2); \
    __ MovFromFloatResult(i.OutputDoubleRegister()); \
  }

#define ASSEMBLE_IEEE754_UNOP(name) \
  { \
    FrameScope scope(tasm(), StackFrame::MANUAL); \
    __ PrepareCallCFunction(0, 1, kScratchReg); \
    __ MovToFloatParameter(i.InputDoubleRegister(0)); \
    __ CallCFunction(ExternalReference::ieee754_##name##_function(), 0, 1); \
    __ MovFromFloatResult(i.OutputDoubleRegister()); \
  }

#define ASSEMBLE_IEEE754_BINOP(name) \
  { \
    FrameScope scope(tasm(), StackFrame::MANUAL); \
    __ PrepareCallCFunction(0, 2, kScratchReg); \
    __ MovToFloatParameters(i.InputDoubleRegister(0), \
                            i.InputDoubleRegister(1)); \
    __ CallCFunction(ExternalReference::ieee754_##name##_function(), 0, 2); \
    __ MovFromFloatResult(i.OutputDoubleRegister()); \
  }

#define ASSEMBLE_DOUBLE_MAX() \
  { \
    DoubleRegister left_reg = i.InputDoubleRegister(0); \
    DoubleRegister right_reg = i.InputDoubleRegister(1); \
    DoubleRegister result_reg = i.OutputDoubleRegister(); \
    Label check_nan_left, check_zero, return_left, return_right, done; \
    __ cdbr(left_reg, right_reg); \
    __ bunordered(&check_nan_left, Label::kNear); \
    __ beq(&check_zero); \
    __ bge(&return_left, Label::kNear); \
    __ b(&return_right, Label::kNear); \
    \
    __ bind(&check_zero); \
    __ lzdr(kDoubleRegZero); \
    __ cdbr(left_reg, kDoubleRegZero); \
    /* left == right != 0. */ \
    __ bne(&return_left, Label::kNear); \
    /* Both are 0 or -0; +0 + -0 == +0 gives max the right sign. */ \
    __ ldr(result_reg, left_reg); \
    __ adbr(result_reg, right_reg); \
    __ b(&done, Label::kNear); \
    \
    __ bind(&check_nan_left); \
    __ cdbr(left_reg, left_reg); \
    /* left == NaN. */ \
    __ bunordered(&return_left, Label::kNear); \
    \
    __ bind(&return_right); \
    if (right_reg != result_reg) { \
      __ ldr(result_reg, right_reg); \
    } \
    __ b(&done, Label::kNear); \
    \
    __ bind(&return_left); \
    if (left_reg != result_reg) { \
      __ ldr(result_reg, left_reg); \
    } \
    __ bind(&done); \
  }

#define ASSEMBLE_DOUBLE_MIN() \
  { \
    DoubleRegister left_reg = i.InputDoubleRegister(0); \
    DoubleRegister right_reg = i.InputDoubleRegister(1); \
    DoubleRegister result_reg = i.OutputDoubleRegister(); \
    Label check_nan_left, check_zero, return_left, return_right, done; \
    __ cdbr(left_reg, right_reg); \
    __ bunordered(&check_nan_left, Label::kNear); \
    __ beq(&check_zero); \
    __ ble(&return_left, Label::kNear); \
    __ b(&return_right, Label::kNear); \
    \
    __ bind(&check_zero); \
    __ lzdr(kDoubleRegZero); \
    __ cdbr(left_reg, kDoubleRegZero); \
    /* left == right != 0. */ \
    __ bne(&return_left, Label::kNear); \
    /* Both are 0 or -0; compute -(-L + -R) so min keeps the -0. */ \
    __ lcdbr(left_reg, left_reg); \
    __ ldr(result_reg, left_reg); \
    if (left_reg == right_reg) { \
      __ adbr(result_reg, right_reg); \
    } else { \
      __ sdbr(result_reg, right_reg); \
    } \
    __ lcdbr(result_reg, result_reg); \
    __ b(&done, Label::kNear); \
    \
    __ bind(&check_nan_left); \
    __ cdbr(left_reg, left_reg); \
    /* left == NaN. */ \
    __ bunordered(&return_left, Label::kNear); \
    \
    __ bind(&return_right); \
    if (right_reg != result_reg) { \
      __ ldr(result_reg, right_reg); \
    } \
    __ b(&done, Label::kNear); \
    \
    __ bind(&return_left); \
    if (left_reg != result_reg) { \
      __ ldr(result_reg, left_reg); \
    } \
    __ bind(&done); \
  }

#define ASSEMBLE_FLOAT_MAX() \
  { \
    DoubleRegister left_reg = i.InputDoubleRegister(0); \
    DoubleRegister right_reg = i.InputDoubleRegister(1); \
    DoubleRegister result_reg = i.OutputDoubleRegister(); \
    Label check_nan_left, check_zero, return_left, return_right, done; \
    __ cebr(left_reg, right_reg); \
    __ bunordered(&check_nan_left, Label::kNear); \
    __ beq(&check_zero); \
    __ bge(&return_left, Label::kNear); \
    __ b(&return_right, Label::kNear); \
    \
    __ bind(&check_zero); \
    __ lzdr(kDoubleRegZero); \
    __ cebr(left_reg, kDoubleRegZero); \
    /* left == right != 0. */ \
    __ bne(&return_left, Label::kNear); \
    /* Both are 0 or -0; +0 + -0 == +0 gives max the right sign. */ \
    __ ldr(result_reg, left_reg); \
    __ aebr(result_reg, right_reg); \
    __ b(&done, Label::kNear); \
    \
    __ bind(&check_nan_left); \
    __ cebr(left_reg, left_reg); \
    /* left == NaN. */ \
    __ bunordered(&return_left, Label::kNear); \
    \
    __ bind(&return_right); \
    if (right_reg != result_reg) { \
      __ ldr(result_reg, right_reg); \
    } \
    __ b(&done, Label::kNear); \
    \
    __ bind(&return_left); \
    if (left_reg != result_reg) { \
      __ ldr(result_reg, left_reg); \
    } \
    __ bind(&done); \
  }

#define ASSEMBLE_FLOAT_MIN() \
  { \
    DoubleRegister left_reg = i.InputDoubleRegister(0); \
    DoubleRegister right_reg = i.InputDoubleRegister(1); \
    DoubleRegister result_reg = i.OutputDoubleRegister(); \
    Label check_nan_left, check_zero, return_left, return_right, done; \
    __ cebr(left_reg, right_reg); \
    __ bunordered(&check_nan_left, Label::kNear); \
    __ beq(&check_zero); \
    __ ble(&return_left, Label::kNear); \
    __ b(&return_right, Label::kNear); \
    \
    __ bind(&check_zero); \
    __ lzdr(kDoubleRegZero); \
    __ cebr(left_reg, kDoubleRegZero); \
    /* left == right != 0. */ \
    __ bne(&return_left, Label::kNear); \
    /* Both are 0 or -0; compute -(-L + -R) so min keeps the -0. */ \
    __ lcebr(left_reg, left_reg); \
    __ ldr(result_reg, left_reg); \
    if (left_reg == right_reg) { \
      __ aebr(result_reg, right_reg); \
    } else { \
      __ sebr(result_reg, right_reg); \
    } \
    __ lcebr(result_reg, result_reg); \
    __ b(&done, Label::kNear); \
    \
    __ bind(&check_nan_left); \
    __ cebr(left_reg, left_reg); \
    /* left == NaN. */ \
    __ bunordered(&return_left, Label::kNear); \
    \
    __ bind(&return_right); \
    if (right_reg != result_reg) { \
      __ ldr(result_reg, right_reg); \
    } \
    __ b(&done, Label::kNear); \
    \
    __ bind(&return_left); \
    if (left_reg != result_reg) { \
      __ ldr(result_reg, left_reg); \
    } \
    __ bind(&done); \
  }

#define ASSEMBLE_LOAD_FLOAT(asm_instr) \
  { \
    DoubleRegister result = i.OutputDoubleRegister(); \
    AddressingMode mode = kMode_None; \
    MemOperand operand = i.MemoryOperand(&mode); \
    __ asm_instr(result, operand); \
  }

#define ASSEMBLE_LOAD_INTEGER(asm_instr) \
  { \
    Register result = i.OutputRegister(); \
    AddressingMode mode = kMode_None; \
    MemOperand operand = i.MemoryOperand(&mode); \
    __ asm_instr(result, operand); \
  }

#define ASSEMBLE_LOADANDTEST64(asm_instr_rr, asm_instr_rm) \
  { \
    AddressingMode mode = AddressingModeField::decode(instr->opcode()); \
    Register dst = HasRegisterOutput(instr) ? i.OutputRegister() : r0; \
    if (mode != kMode_None) { \
      size_t first_index = 0; \
      MemOperand operand = i.MemoryOperand(&mode, &first_index); \
      __ asm_instr_rm(dst, operand); \
    } else if (HasRegisterInput(instr, 0)) { \
      __ asm_instr_rr(dst, i.InputRegister(0)); \
    } else { \
      DCHECK(HasStackSlotInput(instr, 0)); \
      __ asm_instr_rm(dst, i.InputStackSlot(0)); \
    } \
  }

#define ASSEMBLE_LOADANDTEST32(asm_instr_rr, asm_instr_rm) \
  { \
    AddressingMode mode = AddressingModeField::decode(instr->opcode()); \
    Register dst = HasRegisterOutput(instr) ? i.OutputRegister() : r0; \
    if (mode != kMode_None) { \
      size_t first_index = 0; \
      MemOperand operand = i.MemoryOperand(&mode, &first_index); \
      __ asm_instr_rm(dst, operand); \
    } else if (HasRegisterInput(instr, 0)) { \
      __ asm_instr_rr(dst, i.InputRegister(0)); \
    } else { \
      DCHECK(HasStackSlotInput(instr, 0)); \
      __ asm_instr_rm(dst, i.InputStackSlot32(0)); \
    } \
  }

#define ASSEMBLE_STORE_FLOAT32() \
  { \
    size_t index = 0; \
    AddressingMode mode = kMode_None; \
    MemOperand operand = i.MemoryOperand(&mode, &index); \
    DoubleRegister value = i.InputDoubleRegister(index); \
    __ StoreFloat32(value, operand); \
  }

#define ASSEMBLE_STORE_DOUBLE() \
  { \
    size_t index = 0; \
    AddressingMode mode = kMode_None; \
    MemOperand operand = i.MemoryOperand(&mode, &index); \
    DoubleRegister value = i.InputDoubleRegister(index); \
    __ StoreDouble(value, operand); \
  }

#define ASSEMBLE_STORE_INTEGER(asm_instr) \
  { \
    size_t index = 0; \
    AddressingMode mode = kMode_None; \
    MemOperand operand = i.MemoryOperand(&mode, &index); \
    Register value = i.InputRegister(index); \
    __ asm_instr(value, operand); \
  }

#define ATOMIC_COMP_EXCHANGE(start, end, shift_amount, offset) \
  { \
    __ LoadlW(temp0, MemOperand(addr, offset)); \
    __ llgfr(temp1, temp0); \
    __ RotateInsertSelectBits(temp0, old_val, Operand(start), Operand(end), \
                              Operand(shift_amount), false); \
    __ RotateInsertSelectBits(temp1, new_val, Operand(start), Operand(end), \
                              Operand(shift_amount), false); \
    __ CmpAndSwap(temp0, temp1, MemOperand(addr, offset)); \
    __ RotateInsertSelectBits(output, temp0, Operand(start + shift_amount), \
                              Operand(end + shift_amount), \
                              Operand(64 - shift_amount), true); \
  }

#ifdef V8_TARGET_BIG_ENDIAN
#define ATOMIC_COMP_EXCHANGE_BYTE(i) \
  { \
    constexpr int idx = (i); \
    static_assert(idx <= 3 && idx >= 0, "idx is out of range!"); \
    constexpr int start = 32 + 8 * idx; \
    constexpr int end = start + 7; \
    constexpr int shift_amount = (3 - idx) * 8; \
    ATOMIC_COMP_EXCHANGE(start, end, shift_amount, -idx); \
  }
#define ATOMIC_COMP_EXCHANGE_HALFWORD(i) \
  { \
    constexpr int idx = (i); \
    static_assert(idx <= 1 && idx >= 0, "idx is out of range!"); \
    constexpr int start = 32 + 16 * idx; \
    constexpr int end = start + 15; \
    constexpr int shift_amount = (1 - idx) * 16; \
    ATOMIC_COMP_EXCHANGE(start, end, shift_amount, -idx * 2); \
  }
#else
#define ATOMIC_COMP_EXCHANGE_BYTE(i) \
  { \
    constexpr int idx = (i); \
    static_assert(idx <= 3 && idx >= 0, "idx is out of range!"); \
    constexpr int start = 32 + 8 * (3 - idx); \
    constexpr int end = start + 7; \
    constexpr int shift_amount = idx * 8; \
    ATOMIC_COMP_EXCHANGE(start, end, shift_amount, -idx); \
  }
#define ATOMIC_COMP_EXCHANGE_HALFWORD(i) \
  { \
    constexpr int idx = (i); \
    static_assert(idx <= 1 && idx >= 0, "idx is out of range!"); \
    constexpr int start = 32 + 16 * (1 - idx); \
    constexpr int end = start + 15; \
    constexpr int shift_amount = idx * 16; \
    ATOMIC_COMP_EXCHANGE(start, end, shift_amount, -idx * 2); \
  }
#endif

#define ASSEMBLE_ATOMIC_COMPARE_EXCHANGE_BYTE(load_and_ext) \
  { \
    Register old_val = i.InputRegister(0); \
    Register new_val = i.InputRegister(1); \
    Register output = i.OutputRegister(); \
    Register addr = kScratchReg; \
    Register temp0 = r0; \
    Register temp1 = r1; \
    size_t index = 2; \
    AddressingMode mode = kMode_None; \
    MemOperand op = i.MemoryOperand(&mode, &index); \
    Label three, two, one, done; \
    __ lay(addr, op); \
    __ tmll(addr, Operand(3)); \
    __ b(Condition(1), &three); \
    __ b(Condition(2), &two); \
    __ b(Condition(4), &one); \
    /* addr ends in 0b00 */ \
    ATOMIC_COMP_EXCHANGE_BYTE(0); \
    __ b(&done); \
    __ bind(&one); \
    ATOMIC_COMP_EXCHANGE_BYTE(1); \
    __ b(&done); \
    __ bind(&two); \
    ATOMIC_COMP_EXCHANGE_BYTE(2); \
    __ b(&done); \
    __ bind(&three); \
    ATOMIC_COMP_EXCHANGE_BYTE(3); \
    __ bind(&done); \
    __ load_and_ext(output, output); \
  }

#define ASSEMBLE_ATOMIC_COMPARE_EXCHANGE_HALFWORD(load_and_ext) \
  { \
    Register old_val = i.InputRegister(0); \
    Register new_val = i.InputRegister(1); \
    Register output = i.OutputRegister(); \
    Register addr = kScratchReg; \
    Register temp0 = r0; \
    Register temp1 = r1; \
    size_t index = 2; \
    AddressingMode mode = kMode_None; \
    MemOperand op = i.MemoryOperand(&mode, &index); \
    Label two, done; \
    __ lay(addr, op); \
    __ tmll(addr, Operand(3)); \
    __ b(Condition(2), &two); \
    ATOMIC_COMP_EXCHANGE_HALFWORD(0); \
    __ b(&done); \
    __ bind(&two); \
    ATOMIC_COMP_EXCHANGE_HALFWORD(1); \
    __ bind(&done); \
    __ load_and_ext(output, output); \
  }

#define ASSEMBLE_ATOMIC_COMPARE_EXCHANGE_WORD() \
  { \
    Register new_val = i.InputRegister(1); \
    Register output = i.OutputRegister(); \
    Register addr = kScratchReg; \
    size_t index = 2; \
    AddressingMode mode = kMode_None; \
    MemOperand op = i.MemoryOperand(&mode, &index); \
    __ lay(addr, op); \
    __ CmpAndSwap(output, new_val, MemOperand(addr)); \
    __ LoadlW(output, output); \
  }

#define ASSEMBLE_ATOMIC_BINOP_WORD(load_and_op) \
  { \
    Register value = i.InputRegister(2); \
    Register result = i.OutputRegister(0); \
    Register addr = r1; \
    AddressingMode mode = kMode_None; \
    MemOperand op = i.MemoryOperand(&mode); \
    __ lay(addr, op); \
    __ load_and_op(result, value, MemOperand(addr)); \
    __ LoadlW(result, result); \
  }

#define ASSEMBLE_ATOMIC_BINOP_WORD64(load_and_op) \
  { \
    Register value = i.InputRegister(2); \
    Register result = i.OutputRegister(0); \
    Register addr = r1; \
    AddressingMode mode = kMode_None; \
    MemOperand op = i.MemoryOperand(&mode); \
    __ lay(addr, op); \
    __ load_and_op(result, value, MemOperand(addr)); \
  }

#define ATOMIC_BIN_OP(bin_inst, offset, shift_amount, start, end) \
  { \
    Label do_cs; \
    __ LoadlW(prev, MemOperand(addr, offset)); \
    __ bind(&do_cs); \
    __ RotateInsertSelectBits(temp, value, Operand(start), Operand(end), \
                              Operand(static_cast<intptr_t>(shift_amount)), \
                              true); \
    __ bin_inst(new_val, prev, temp); \
    __ lr(temp, prev); \
    __ RotateInsertSelectBits(temp, new_val, Operand(start), Operand(end), \
                              Operand::Zero(), false); \
    __ CmpAndSwap(prev, temp, MemOperand(addr, offset)); \
    __ bne(&do_cs, Label::kNear); \
  }

#ifdef V8_TARGET_BIG_ENDIAN
#define ATOMIC_BIN_OP_HALFWORD(bin_inst, index, extract_result) \
  { \
    constexpr int offset = -(2 * index); \
    constexpr int shift_amount = 16 - (index * 16); \
    constexpr int start = 48 - shift_amount; \
    constexpr int end = start + 15; \
    ATOMIC_BIN_OP(bin_inst, offset, shift_amount, start, end); \
    extract_result(); \
  }
#define ATOMIC_BIN_OP_BYTE(bin_inst, index, extract_result) \
  { \
    constexpr int offset = -(index); \
    constexpr int shift_amount = 24 - (index * 8); \
    constexpr int start = 56 - shift_amount; \
    constexpr int end = start + 7; \
    ATOMIC_BIN_OP(bin_inst, offset, shift_amount, start, end); \
    extract_result(); \
  }
#else
#define ATOMIC_BIN_OP_HALFWORD(bin_inst, index, extract_result) \
  { \
    constexpr int offset = -(2 * index); \
    constexpr int shift_amount = index * 16; \
    constexpr int start = 48 - shift_amount; \
    constexpr int end = start + 15; \
    ATOMIC_BIN_OP(bin_inst, offset, shift_amount, start, end); \
    extract_result(); \
  }
#define ATOMIC_BIN_OP_BYTE(bin_inst, index, extract_result) \
  { \
    constexpr int offset = -(index); \
    constexpr int shift_amount = index * 8; \
    constexpr int start = 56 - shift_amount; \
    constexpr int end = start + 7; \
    ATOMIC_BIN_OP(bin_inst, offset, shift_amount, start, end); \
    extract_result(); \
  }
#endif  // V8_TARGET_BIG_ENDIAN

#define ASSEMBLE_ATOMIC_BINOP_HALFWORD(bin_inst, extract_result) \
  { \
    Register value = i.InputRegister(2); \
    Register result = i.OutputRegister(0); \
    Register prev = i.TempRegister(0); \
    Register new_val = r0; \
    Register addr = r1; \
    Register temp = kScratchReg; \
    AddressingMode mode = kMode_None; \
    MemOperand op = i.MemoryOperand(&mode); \
    Label two, done; \
    __ lay(addr, op); \
    __ tmll(addr, Operand(3)); \
    __ b(Condition(2), &two); \
    /* word boundary */ \
    ATOMIC_BIN_OP_HALFWORD(bin_inst, 0, extract_result); \
    __ b(&done); \
    __ bind(&two); \
    /* halfword boundary */ \
    ATOMIC_BIN_OP_HALFWORD(bin_inst, 1, extract_result); \
    __ bind(&done); \
  }

#define ASSEMBLE_ATOMIC_BINOP_BYTE(bin_inst, extract_result) \
  { \
    Register value = i.InputRegister(2); \
    Register result = i.OutputRegister(0); \
    Register addr = i.TempRegister(0); \
    Register prev = r0; \
    Register new_val = r1; \
    Register temp = kScratchReg; \
    AddressingMode mode = kMode_None; \
    MemOperand op = i.MemoryOperand(&mode); \
    Label done, one, two, three; \
    __ lay(addr, op); \
    __ tmll(addr, Operand(3)); \
    __ b(Condition(1), &three); \
    __ b(Condition(2), &two); \
    __ b(Condition(4), &one); \
    /* addr ends in 0b00 */ \
    ATOMIC_BIN_OP_BYTE(bin_inst, 0, extract_result); \
    __ b(&done); \
    __ bind(&one); \
    ATOMIC_BIN_OP_BYTE(bin_inst, 1, extract_result); \
    __ b(&done); \
    __ bind(&two); \
    ATOMIC_BIN_OP_BYTE(bin_inst, 2, extract_result); \
    __ b(&done); \
    __ bind(&three); \
    ATOMIC_BIN_OP_BYTE(bin_inst, 3, extract_result); \
    __ bind(&done); \
  }

#define ASSEMBLE_ATOMIC64_COMP_EXCHANGE_WORD64() \
  { \
    Register new_val = i.InputRegister(1); \
    Register output = i.OutputRegister(); \
    Register addr = kScratchReg; \
    size_t index = 2; \
    AddressingMode mode = kMode_None; \
    MemOperand op = i.MemoryOperand(&mode, &index); \
    __ lay(addr, op); \
    __ CmpAndSwap64(output, new_val, MemOperand(addr)); \
  }

void CodeGenerator::AssembleDeconstructFrame() {
  __ LeaveFrame(StackFrame::MANUAL);
}

void CodeGenerator::AssemblePrepareTailCall() {
  if (frame_access_state()->has_frame()) {
    __ RestoreFrameStateForTailCall();
  }
  frame_access_state()->SetFrameAccessToSP();
}
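// Before a tail call from a JS function, the caller may have gone through an
// arguments adaptor frame. The helper below inspects the frame marker and,
// if an adaptor frame is found, drops it together with the actual (adapted)
// argument count before transferring control.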
void CodeGenerator::AssemblePopArgumentsAdaptorFrame(Register args_reg,
                                                     Register scratch1,
                                                     Register scratch2,
                                                     Register scratch3) {
  DCHECK(!AreAliased(args_reg, scratch1, scratch2, scratch3));
  Label done;

  // Check if current frame is an arguments adaptor frame.
  __ LoadP(scratch1, MemOperand(fp, StandardFrameConstants::kContextOffset));
  __ CmpP(scratch1,
          Operand(StackFrame::TypeToMarker(StackFrame::ARGUMENTS_ADAPTOR)));
  __ bne(&done);

  // Load arguments count from current arguments adaptor frame (note, it
  // does not include receiver).
  Register caller_args_count_reg = scratch1;
  __ LoadP(caller_args_count_reg,
           MemOperand(fp, ArgumentsAdaptorFrameConstants::kLengthOffset));
  __ SmiUntag(caller_args_count_reg);

  ParameterCount callee_args_count(args_reg);
  __ PrepareForTailCall(callee_args_count, caller_args_count_reg, scratch2,
                        scratch3);
  __ bind(&done);
}

void FlushPendingPushRegisters(TurboAssembler* tasm,
                               FrameAccessState* frame_access_state,
                               ZoneVector<Register>* pending_pushes) {
  switch (pending_pushes->size()) {
    case 0:
      break;
    case 1:
      tasm->Push((*pending_pushes)[0]);
      break;
    case 2:
      tasm->Push((*pending_pushes)[0], (*pending_pushes)[1]);
      break;
    case 3:
      tasm->Push((*pending_pushes)[0], (*pending_pushes)[1],
                 (*pending_pushes)[2]);
      break;
    default:
      UNREACHABLE();
  }
  frame_access_state->IncreaseSPDelta(pending_pushes->size());
  pending_pushes->clear();
}
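// AdjustStackPointerForTailCall moves sp so that exactly new_slot_above_sp
// slots lie above it, flushing any batched pushes first so their stack
// offsets stay correct.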
void AdjustStackPointerForTailCall(
    TurboAssembler* tasm, FrameAccessState* state, int new_slot_above_sp,
    ZoneVector<Register>* pending_pushes = nullptr,
    bool allow_shrinkage = true) {
  int current_sp_offset = state->GetSPToFPSlotCount() +
                          StandardFrameConstants::kFixedSlotCountAboveFp;
  int stack_slot_delta = new_slot_above_sp - current_sp_offset;
  if (stack_slot_delta > 0) {
    if (pending_pushes != nullptr) {
      FlushPendingPushRegisters(tasm, state, pending_pushes);
    }
    tasm->AddP(sp, sp, Operand(-stack_slot_delta * kPointerSize));
    state->IncreaseSPDelta(stack_slot_delta);
  } else if (allow_shrinkage && stack_slot_delta < 0) {
    if (pending_pushes != nullptr) {
      FlushPendingPushRegisters(tasm, state, pending_pushes);
    }
    tasm->AddP(sp, sp, Operand(-stack_slot_delta * kPointerSize));
    state->IncreaseSPDelta(stack_slot_delta);
  }
}
void EmitWordLoadPoisoningIfNeeded(CodeGenerator* codegen, Instruction* instr,
                                   S390OperandConverter& i) {
  const MemoryAccessMode access_mode =
      static_cast<MemoryAccessMode>(MiscField::decode(instr->opcode()));
  if (access_mode == kMemoryAccessPoisoned) {
    Register value = i.OutputRegister();
    codegen->tasm()->AndP(value, kSpeculationPoisonRegister);
  }
}
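// Tail calls first try to turn gap moves whose destinations form the top of
// the new frame into real pushes (batched up to three registers at a time),
// then fall back to a plain sp adjustment for whatever remains.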
void CodeGenerator::AssembleTailCallBeforeGap(Instruction* instr,
                                              int first_unused_stack_slot) {
  ZoneVector<MoveOperands*> pushes(zone());
  GetPushCompatibleMoves(instr, kRegisterPush, &pushes);

  if (!pushes.empty() &&
      (LocationOperand::cast(pushes.back()->destination()).index() + 1 ==
       first_unused_stack_slot)) {
    S390OperandConverter g(this, instr);
    ZoneVector<Register> pending_pushes(zone());
    for (auto move : pushes) {
      LocationOperand destination_location(
          LocationOperand::cast(move->destination()));
      InstructionOperand source(move->source());
      AdjustStackPointerForTailCall(
          tasm(), frame_access_state(),
          destination_location.index() - pending_pushes.size(),
          &pending_pushes);
      // Pushes of non-register data types are not supported.
      DCHECK(source.IsRegister());
      LocationOperand source_location(LocationOperand::cast(source));
      pending_pushes.push_back(source_location.GetRegister());
      if (pending_pushes.size() == 3) {
        FlushPendingPushRegisters(tasm(), frame_access_state(),
                                  &pending_pushes);
      }
      move->Eliminate();
    }
    FlushPendingPushRegisters(tasm(), frame_access_state(), &pending_pushes);
  }
  AdjustStackPointerForTailCall(tasm(), frame_access_state(),
                                first_unused_stack_slot, nullptr, false);
}

void CodeGenerator::AssembleTailCallAfterGap(Instruction* instr,
                                             int first_unused_stack_slot) {
  AdjustStackPointerForTailCall(tasm(), frame_access_state(),
                                first_unused_stack_slot);
}
// Check that {kJavaScriptCallCodeStartRegister} is correct.
void CodeGenerator::AssembleCodeStartRegisterCheck() {
  Register scratch = r1;
  __ ComputeCodeStartAddress(scratch);
  __ CmpP(scratch, kJavaScriptCallCodeStartRegister);
  __ Assert(eq, AbortReason::kWrongFunctionCodeStart);
}

void CodeGenerator::BailoutIfDeoptimized() {
  if (FLAG_debug_code) {
    // Check that {kJavaScriptCallCodeStartRegister} is correct.
    __ ComputeCodeStartAddress(ip);
    __ CmpP(ip, kJavaScriptCallCodeStartRegister);
    __ Assert(eq, AbortReason::kWrongFunctionCodeStart);
  }

  int offset = Code::kCodeDataContainerOffset - Code::kHeaderSize;
  __ LoadP(ip, MemOperand(kJavaScriptCallCodeStartRegister, offset));
  __ LoadW(ip,
           FieldMemOperand(ip, CodeDataContainer::kKindSpecificFlagsOffset));
  __ TestBit(ip, Code::kMarkedForDeoptimizationBit);
  // Ensure we're not serializing (otherwise we'd need to use an indirection
  // to access the builtin below).
  DCHECK(!isolate()->ShouldLoadConstantsFromRootList());
  Handle<Code> code = isolate()->builtins()->builtin_handle(
      Builtins::kCompileLazyDeoptimizedCode);
  __ Jump(code, RelocInfo::CODE_TARGET, ne);
}

void CodeGenerator::GenerateSpeculationPoisonFromCodeStartRegister() {
  Register scratch = r1;

  Label current_pc;
  __ larl(scratch, &current_pc);

  __ bind(&current_pc);
  __ SubP(scratch, Operand(__ pc_offset()));

  // Calculate a mask which has all bits set in the normal case, but has all
  // bits cleared if we are speculatively executing the wrong PC.
  __ LoadImmP(kSpeculationPoisonRegister, Operand::Zero());
  __ LoadImmP(r0, Operand(-1));
  __ CmpP(kJavaScriptCallCodeStartRegister, scratch);
  __ LoadOnConditionP(eq, kSpeculationPoisonRegister, r0);
}
void CodeGenerator::AssembleRegisterArgumentPoisoning() {
  __ AndP(kJSFunctionRegister, kJSFunctionRegister, kSpeculationPoisonRegister);
  __ AndP(kContextRegister, kContextRegister, kSpeculationPoisonRegister);
  __ AndP(sp, sp, kSpeculationPoisonRegister);
}
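// Central opcode dispatch: one case per architecture-independent kArch*
// opcode and per S390-specific kS390_* opcode produced by the instruction
// selector.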
// Assembles an instruction after register allocation, producing machine code.
CodeGenerator::CodeGenResult CodeGenerator::AssembleArchInstruction(
    Instruction* instr) {
  S390OperandConverter i(this, instr);
  ArchOpcode opcode = ArchOpcodeField::decode(instr->opcode());

  switch (opcode) {
    case kArchComment:
#ifdef V8_TARGET_ARCH_S390X
      __ RecordComment(reinterpret_cast<const char*>(i.InputInt64(0)));
#else
      __ RecordComment(reinterpret_cast<const char*>(i.InputInt32(0)));
#endif
      break;
    case kArchCallCodeObject: {
      if (HasRegisterInput(instr, 0)) {
        Register reg = i.InputRegister(0);
        DCHECK_IMPLIES(
            HasCallDescriptorFlag(instr, CallDescriptor::kFixedTargetRegister),
            reg == kJavaScriptCallCodeStartRegister);
        __ AddP(reg, reg, Operand(Code::kHeaderSize - kHeapObjectTag));
        __ Call(reg);
      } else {
        __ Call(i.InputCode(0), RelocInfo::CODE_TARGET);
      }
      RecordCallPosition(instr);
      frame_access_state()->ClearSPDelta();
      break;
    }
    case kArchCallWasmFunction: {
      if (instr->InputAt(0)->IsImmediate()) {
        Constant constant = i.ToConstant(instr->InputAt(0));
#ifdef V8_TARGET_ARCH_S390X
        Address wasm_code = static_cast<Address>(constant.ToInt64());
#else
        Address wasm_code = static_cast<Address>(constant.ToInt32());
#endif
        __ Call(wasm_code, constant.rmode());
      } else {
        __ Call(i.InputRegister(0));
      }
      RecordCallPosition(instr);
      frame_access_state()->ClearSPDelta();
      break;
    }
    case kArchTailCallCodeObjectFromJSFunction:
    case kArchTailCallCodeObject: {
      if (opcode == kArchTailCallCodeObjectFromJSFunction) {
        AssemblePopArgumentsAdaptorFrame(kJavaScriptCallArgCountRegister,
                                         i.TempRegister(0), i.TempRegister(1),
                                         i.TempRegister(2));
      }
      if (HasRegisterInput(instr, 0)) {
        Register reg = i.InputRegister(0);
        DCHECK_IMPLIES(
            HasCallDescriptorFlag(instr, CallDescriptor::kFixedTargetRegister),
            reg == kJavaScriptCallCodeStartRegister);
        __ AddP(reg, reg, Operand(Code::kHeaderSize - kHeapObjectTag));
        __ Jump(reg);
      } else {
        // The constant pool is unavailable because the frame has already been
        // restored for the tail call.
        ConstantPoolUnavailableScope constant_pool_unavailable(tasm());
        __ Jump(i.InputCode(0), RelocInfo::CODE_TARGET);
      }
      frame_access_state()->ClearSPDelta();
      frame_access_state()->SetFrameAccessToDefault();
      break;
    }
    case kArchTailCallWasm: {
      if (instr->InputAt(0)->IsImmediate()) {
        Constant constant = i.ToConstant(instr->InputAt(0));
#ifdef V8_TARGET_ARCH_S390X
        Address wasm_code = static_cast<Address>(constant.ToInt64());
#else
        Address wasm_code = static_cast<Address>(constant.ToInt32());
#endif
        __ Jump(wasm_code, constant.rmode());
      } else {
        __ Jump(i.InputRegister(0));
      }
      frame_access_state()->ClearSPDelta();
      frame_access_state()->SetFrameAccessToDefault();
      break;
    }
    case kArchTailCallAddress: {
      CHECK(!instr->InputAt(0)->IsImmediate());
      Register reg = i.InputRegister(0);
      DCHECK_IMPLIES(
          HasCallDescriptorFlag(instr, CallDescriptor::kFixedTargetRegister),
          reg == kJavaScriptCallCodeStartRegister);
      __ Jump(reg);
      frame_access_state()->ClearSPDelta();
      frame_access_state()->SetFrameAccessToDefault();
      break;
    }
    case kArchCallJSFunction: {
      Register func = i.InputRegister(0);
      if (FLAG_debug_code) {
        // Check the function's context matches the context argument.
        __ LoadP(kScratchReg,
                 FieldMemOperand(func, JSFunction::kContextOffset));
        __ CmpP(cp, kScratchReg);
        __ Assert(eq, AbortReason::kWrongFunctionContext);
      }
      static_assert(kJavaScriptCallCodeStartRegister == r4, "ABI mismatch");
      __ LoadP(r4, FieldMemOperand(func, JSFunction::kCodeOffset));
      __ AddP(r4, r4, Operand(Code::kHeaderSize - kHeapObjectTag));
      __ Call(r4);
      RecordCallPosition(instr);
      frame_access_state()->ClearSPDelta();
      break;
    }
    case kArchPrepareCallCFunction: {
      int const num_parameters = MiscField::decode(instr->opcode());
      __ PrepareCallCFunction(num_parameters, kScratchReg);
      // Frame alignment requires using FP-relative frame addressing.
      frame_access_state()->SetFrameAccessToFP();
      break;
    }
    case kArchSaveCallerRegisters: {
      fp_mode_ =
          static_cast<SaveFPRegsMode>(MiscField::decode(instr->opcode()));
      DCHECK(fp_mode_ == kDontSaveFPRegs || fp_mode_ == kSaveFPRegs);
      // kReturnRegister0 should have been saved before entering the stub.
      int bytes = __ PushCallerSaved(fp_mode_, kReturnRegister0);
      DCHECK_EQ(0, bytes % kPointerSize);
      DCHECK_EQ(0, frame_access_state()->sp_delta());
      frame_access_state()->IncreaseSPDelta(bytes / kPointerSize);
      DCHECK(!caller_registers_saved_);
      caller_registers_saved_ = true;
      break;
    }
    case kArchRestoreCallerRegisters: {
      DCHECK(fp_mode_ ==
             static_cast<SaveFPRegsMode>(MiscField::decode(instr->opcode())));
      DCHECK(fp_mode_ == kDontSaveFPRegs || fp_mode_ == kSaveFPRegs);
      // Don't overwrite the returned value.
      int bytes = __ PopCallerSaved(fp_mode_, kReturnRegister0);
      frame_access_state()->IncreaseSPDelta(-(bytes / kPointerSize));
      DCHECK_EQ(0, frame_access_state()->sp_delta());
      DCHECK(caller_registers_saved_);
      caller_registers_saved_ = false;
      break;
    }
    case kArchPrepareTailCall:
      AssemblePrepareTailCall();
      break;
    case kArchCallCFunction: {
      int const num_parameters = MiscField::decode(instr->opcode());
      if (instr->InputAt(0)->IsImmediate()) {
        ExternalReference ref = i.InputExternalReference(0);
        __ CallCFunction(ref, num_parameters);
      } else {
        Register func = i.InputRegister(0);
        __ CallCFunction(func, num_parameters);
      }
      frame_access_state()->SetFrameAccessToDefault();
      frame_access_state()->ClearSPDelta();
      if (caller_registers_saved_) {
        // Re-sync the SP delta that kArchSaveCallerRegisters introduced.
        int bytes =
            __ RequiredStackSizeForCallerSaved(fp_mode_, kReturnRegister0);
        frame_access_state()->IncreaseSPDelta(bytes / kPointerSize);
      }
      break;
    }
    case kArchJmp:
      AssembleArchJump(i.InputRpo(0));
      break;
    case kArchBinarySearchSwitch:
      AssembleArchBinarySearchSwitch(instr);
      break;
    case kArchLookupSwitch:
      AssembleArchLookupSwitch(instr);
      break;
    case kArchTableSwitch:
      AssembleArchTableSwitch(instr);
      break;
    case kArchDebugAbort:
      DCHECK(i.InputRegister(0) == r3);
      if (!frame_access_state()->has_frame()) {
        // We don't actually want to generate a pile of code for this, so just
        // claim there is a stack frame, without generating one.
        FrameScope scope(tasm(), StackFrame::NONE);
        __ Call(isolate()->builtins()->builtin_handle(Builtins::kAbortJS),
                RelocInfo::CODE_TARGET);
      } else {
        __ Call(isolate()->builtins()->builtin_handle(Builtins::kAbortJS),
                RelocInfo::CODE_TARGET);
      }
      __ stop("kArchDebugAbort");
      break;
    case kArchDebugBreak:
      __ stop("kArchDebugBreak");
      break;
    case kArchNop:
    case kArchThrowTerminator:
      // Don't emit code for nops.
      break;
    case kArchDeoptimize: {
      int deopt_state_id =
          BuildTranslation(instr, -1, 0, OutputFrameStateCombine::Ignore());
      CodeGenResult result =
          AssembleDeoptimizerCall(deopt_state_id, current_source_position_);
      if (result != kSuccess) return result;
      break;
    }
    case kArchRet:
      AssembleReturn(instr->InputAt(0));
      break;
    case kArchStackPointer:
      __ LoadRR(i.OutputRegister(), sp);
      break;
    case kArchFramePointer:
      __ LoadRR(i.OutputRegister(), fp);
      break;
    case kArchParentFramePointer:
      if (frame_access_state()->has_frame()) {
        __ LoadP(i.OutputRegister(), MemOperand(fp, 0));
      } else {
        __ LoadRR(i.OutputRegister(), fp);
      }
      break;
    case kArchTruncateDoubleToI:
      __ TruncateDoubleToI(isolate(), zone(), i.OutputRegister(),
                           i.InputDoubleRegister(0), DetermineStubCallMode());
      break;
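    // The write barrier stores the value first, then branches to the
    // OutOfLineRecordWrite stub defined above only when the object's page is
    // flagged as containing interesting pointers.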
    case kArchStoreWithWriteBarrier: {
      RecordWriteMode mode =
          static_cast<RecordWriteMode>(MiscField::decode(instr->opcode()));
      Register object = i.InputRegister(0);
      Register value = i.InputRegister(2);
      Register scratch0 = i.TempRegister(0);
      Register scratch1 = i.TempRegister(1);
      OutOfLineRecordWrite* ool;

      AddressingMode addressing_mode =
          AddressingModeField::decode(instr->opcode());
      if (addressing_mode == kMode_MRI) {
        int32_t offset = i.InputInt32(1);
        ool = new (zone())
            OutOfLineRecordWrite(this, object, offset, value, scratch0,
                                 scratch1, mode, DetermineStubCallMode());
        __ StoreP(value, MemOperand(object, offset));
      } else {
        DCHECK_EQ(kMode_MRR, addressing_mode);
        Register offset(i.InputRegister(1));
        ool = new (zone())
            OutOfLineRecordWrite(this, object, offset, value, scratch0,
                                 scratch1, mode, DetermineStubCallMode());
        __ StoreP(value, MemOperand(object, offset));
      }
      __ CheckPageFlag(object, scratch0,
                       MemoryChunk::kPointersFromHereAreInterestingMask, ne,
                       ool->entry());
      __ bind(ool->exit());
      break;
    }
    case kArchStackSlot: {
      FrameOffset offset =
          frame_access_state()->GetFrameOffset(i.InputInt32(0));
      __ AddP(i.OutputRegister(), offset.from_stack_pointer() ? sp : fp,
              Operand(offset.offset()));
      break;
    }
    case kArchWordPoisonOnSpeculation:
      DCHECK_EQ(i.OutputRegister(), i.InputRegister(0));
      __ AndP(i.InputRegister(0), kSpeculationPoisonRegister);
      break;
    case kS390_Abs32:
      __ lpr(i.OutputRegister(0), i.InputRegister(0));
      break;
    case kS390_Abs64:
      __ lpgr(i.OutputRegister(0), i.InputRegister(0));
      break;
    case kS390_And32:
      // zero-ext
      if (CpuFeatures::IsSupported(DISTINCT_OPS)) {
        ASSEMBLE_BIN32_OP(RRRInstr(nrk), RM32Instr(And), RIInstr(nilf));
      } else {
        ASSEMBLE_BIN32_OP(RRInstr(nr), RM32Instr(And), RIInstr(nilf));
      }
      break;
    case kS390_And64:
      if (CpuFeatures::IsSupported(DISTINCT_OPS)) {
        ASSEMBLE_BIN_OP(RRRInstr(ngrk), RM64Instr(ng), nullInstr);
      } else {
        ASSEMBLE_BIN_OP(RRInstr(ngr), RM64Instr(ng), nullInstr);
      }
      break;
    case kS390_Or32:
      // zero-ext
      if (CpuFeatures::IsSupported(DISTINCT_OPS)) {
        ASSEMBLE_BIN32_OP(RRRInstr(ork), RM32Instr(Or), RIInstr(oilf));
      } else {
        ASSEMBLE_BIN32_OP(RRInstr(or_z), RM32Instr(Or), RIInstr(oilf));
      }
      break;
    case kS390_Or64:
      if (CpuFeatures::IsSupported(DISTINCT_OPS)) {
        ASSEMBLE_BIN_OP(RRRInstr(ogrk), RM64Instr(og), nullInstr);
      } else {
        ASSEMBLE_BIN_OP(RRInstr(ogr), RM64Instr(og), nullInstr);
      }
      break;
    case kS390_Xor32:
      // zero-ext
      if (CpuFeatures::IsSupported(DISTINCT_OPS)) {
        ASSEMBLE_BIN32_OP(RRRInstr(xrk), RM32Instr(Xor), RIInstr(xilf));
      } else {
        ASSEMBLE_BIN32_OP(RRInstr(xr), RM32Instr(Xor), RIInstr(xilf));
      }
      break;
    case kS390_Xor64:
      if (CpuFeatures::IsSupported(DISTINCT_OPS)) {
        ASSEMBLE_BIN_OP(RRRInstr(xgrk), RM64Instr(xg), nullInstr);
      } else {
        ASSEMBLE_BIN_OP(RRInstr(xgr), RM64Instr(xg), nullInstr);
      }
      break;
    case kS390_ShiftLeft32:
      // zero-ext
      if (CpuFeatures::IsSupported(DISTINCT_OPS)) {
        ASSEMBLE_BIN32_OP(RRRInstr(ShiftLeft), nullInstr, RRIInstr(ShiftLeft));
      } else {
        ASSEMBLE_BIN32_OP(RRInstr(sll), nullInstr, RIInstr(sll));
      }
      break;
    case kS390_ShiftLeft64:
      ASSEMBLE_BIN_OP(RRRInstr(sllg), nullInstr, RRIInstr(sllg));
      break;
    case kS390_ShiftRight32:
      // zero-ext
      if (CpuFeatures::IsSupported(DISTINCT_OPS)) {
        ASSEMBLE_BIN32_OP(RRRInstr(srlk), nullInstr, RRIInstr(srlk));
      } else {
        ASSEMBLE_BIN32_OP(RRInstr(srl), nullInstr, RIInstr(srl));
      }
      break;
    case kS390_ShiftRight64:
      ASSEMBLE_BIN_OP(RRRInstr(srlg), nullInstr, RRIInstr(srlg));
      break;
    case kS390_ShiftRightArith32:
      // zero-ext
      if (CpuFeatures::IsSupported(DISTINCT_OPS)) {
        ASSEMBLE_BIN32_OP(RRRInstr(srak), nullInstr, RRIInstr(srak));
      } else {
        ASSEMBLE_BIN32_OP(RRInstr(sra), nullInstr, RIInstr(sra));
      }
      break;
    case kS390_ShiftRightArith64:
      ASSEMBLE_BIN_OP(RRRInstr(srag), nullInstr, RRIInstr(srag));
      break;
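    // On 32-bit builds, 64-bit integer values live in register pairs
    // (low word, high word), so the pair opcodes below combine 32-bit
    // logical ops with carry/borrow propagation, or temporarily widen into
    // a single 64-bit register as in kS390_MulPair.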
#if !V8_TARGET_ARCH_S390X
    case kS390_AddPair:
      // Inputs: (0) left low, (1) left high, (2) right low, (3) right high.
      __ AddLogical32(i.OutputRegister(0), i.InputRegister(0),
                      i.InputRegister(2));
      __ AddLogicalWithCarry32(i.OutputRegister(1), i.InputRegister(1),
                               i.InputRegister(3));
      break;
    case kS390_SubPair:
      __ SubLogical32(i.OutputRegister(0), i.InputRegister(0),
                      i.InputRegister(2));
      __ SubLogicalWithBorrow32(i.OutputRegister(1), i.InputRegister(1),
                                i.InputRegister(3));
      break;
    case kS390_MulPair:
      // Assemble each operand's (high, low) pair into one 64-bit register,
      // multiply, then split the product back into two words.
      __ sllg(r0, i.InputRegister(1), Operand(32));
      __ sllg(r1, i.InputRegister(3), Operand(32));
      __ lr(r0, i.InputRegister(0));
      __ lr(r1, i.InputRegister(2));
      __ msgr(r1, r0);
      __ lr(i.OutputRegister(0), r1);
      __ srag(i.OutputRegister(1), r1, Operand(32));
      break;
    case kS390_ShiftLeftPair: {
      Register second_output =
          instr->OutputCount() >= 2 ? i.OutputRegister(1) : i.TempRegister(0);
      if (instr->InputAt(2)->IsImmediate()) {
        __ ShiftLeftPair(i.OutputRegister(0), second_output,
                         i.InputRegister(0), i.InputRegister(1),
                         i.InputInt32(2));
      } else {
        __ ShiftLeftPair(i.OutputRegister(0), second_output,
                         i.InputRegister(0), i.InputRegister(1), kScratchReg,
                         i.InputRegister(2));
      }
      break;
    }
    case kS390_ShiftRightPair: {
      Register second_output =
          instr->OutputCount() >= 2 ? i.OutputRegister(1) : i.TempRegister(0);
      if (instr->InputAt(2)->IsImmediate()) {
        __ ShiftRightPair(i.OutputRegister(0), second_output,
                          i.InputRegister(0), i.InputRegister(1),
                          i.InputInt32(2));
      } else {
        __ ShiftRightPair(i.OutputRegister(0), second_output,
                          i.InputRegister(0), i.InputRegister(1), kScratchReg,
                          i.InputRegister(2));
      }
      break;
    }
    case kS390_ShiftRightArithPair: {
      Register second_output =
          instr->OutputCount() >= 2 ? i.OutputRegister(1) : i.TempRegister(0);
      if (instr->InputAt(2)->IsImmediate()) {
        __ ShiftRightArithPair(i.OutputRegister(0), second_output,
                               i.InputRegister(0), i.InputRegister(1),
                               i.InputInt32(2));
      } else {
        __ ShiftRightArithPair(i.OutputRegister(0), second_output,
                               i.InputRegister(0), i.InputRegister(1),
                               kScratchReg, i.InputRegister(2));
      }
      break;
    }
#endif
    case kS390_RotRight32: {
      // zero-ext
      if (HasRegisterInput(instr, 1)) {
        __ LoadComplementRR(kScratchReg, i.InputRegister(1));
        __ rll(i.OutputRegister(), i.InputRegister(0), kScratchReg);
      } else {
        __ rll(i.OutputRegister(), i.InputRegister(0),
               Operand(32 - i.InputInt32(1)));
      }
      CHECK_AND_ZERO_EXT_OUTPUT(2);
      break;
    }
    case kS390_RotRight64:
      if (HasRegisterInput(instr, 1)) {
        __ lcgr(kScratchReg, i.InputRegister(1));
        __ rllg(i.OutputRegister(), i.InputRegister(0), kScratchReg);
      } else {
        DCHECK(HasImmediateInput(instr, 1));
        __ rllg(i.OutputRegister(), i.InputRegister(0),
                Operand(64 - i.InputInt32(1)));
      }
      break;
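    // The rotate-and-clear opcodes map onto the z/Architecture "rotate then
    // insert selected bits" instruction (RotateInsertSelectBits) when the
    // general-instruction-extension facility is available; otherwise they
    // are emulated with a rotate plus shift pairs that clear the unwanted
    // bits.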
    case kS390_RotLeftAndClear64:
      if (CpuFeatures::IsSupported(GENERAL_INSTR_EXT)) {
        int shiftAmount = i.InputInt32(1);
        int endBit = 63 - shiftAmount;
        int startBit = 63 - i.InputInt32(2);
        __ RotateInsertSelectBits(i.OutputRegister(), i.InputRegister(0),
                                  Operand(startBit), Operand(endBit),
                                  Operand(shiftAmount), true);
      } else {
        int shiftAmount = i.InputInt32(1);
        int clearBit = 63 - i.InputInt32(2);
        __ rllg(i.OutputRegister(), i.InputRegister(0), Operand(shiftAmount));
        __ sllg(i.OutputRegister(), i.OutputRegister(), Operand(clearBit));
        __ srlg(i.OutputRegister(), i.OutputRegister(),
                Operand(clearBit + shiftAmount));
        __ sllg(i.OutputRegister(), i.OutputRegister(), Operand(shiftAmount));
      }
      break;
    case kS390_RotLeftAndClearLeft64:
      if (CpuFeatures::IsSupported(GENERAL_INSTR_EXT)) {
        int shiftAmount = i.InputInt32(1);
        int endBit = 63;
        int startBit = 63 - i.InputInt32(2);
        __ RotateInsertSelectBits(i.OutputRegister(), i.InputRegister(0),
                                  Operand(startBit), Operand(endBit),
                                  Operand(shiftAmount), true);
      } else {
        int shiftAmount = i.InputInt32(1);
        int clearBit = 63 - i.InputInt32(2);
        __ rllg(i.OutputRegister(), i.InputRegister(0), Operand(shiftAmount));
        __ sllg(i.OutputRegister(), i.OutputRegister(), Operand(clearBit));
        __ srlg(i.OutputRegister(), i.OutputRegister(), Operand(clearBit));
      }
      break;
    case kS390_RotLeftAndClearRight64:
      if (CpuFeatures::IsSupported(GENERAL_INSTR_EXT)) {
        int shiftAmount = i.InputInt32(1);
        int endBit = 63 - i.InputInt32(2);
        int startBit = 0;
        __ RotateInsertSelectBits(i.OutputRegister(), i.InputRegister(0),
                                  Operand(startBit), Operand(endBit),
                                  Operand(shiftAmount), true);
      } else {
        int shiftAmount = i.InputInt32(1);
        int clearBit = i.InputInt32(2);
        __ rllg(i.OutputRegister(), i.InputRegister(0), Operand(shiftAmount));
        __ srlg(i.OutputRegister(), i.OutputRegister(), Operand(clearBit));
        __ sllg(i.OutputRegister(), i.OutputRegister(), Operand(clearBit));
      }
      break;
    case kS390_Add32:
      // zero-ext
      if (CpuFeatures::IsSupported(DISTINCT_OPS)) {
        ASSEMBLE_BIN32_OP(RRRInstr(ark), RM32Instr(Add32), RRIInstr(Add32));
      } else {
        ASSEMBLE_BIN32_OP(RRInstr(ar), RM32Instr(Add32), RIInstr(Add32));
      }
      break;
    case kS390_Add64:
      if (CpuFeatures::IsSupported(DISTINCT_OPS)) {
        ASSEMBLE_BIN_OP(RRRInstr(agrk), RM64Instr(ag), RRIInstr(AddP));
      } else {
        ASSEMBLE_BIN_OP(RRInstr(agr), RM64Instr(ag), RIInstr(agfi));
      }
      break;
    case kS390_AddFloat:
      ASSEMBLE_BIN_OP(DDInstr(aebr), DMTInstr(AddFloat32), nullInstr);
      break;
    case kS390_AddDouble:
      ASSEMBLE_BIN_OP(DDInstr(adbr), DMTInstr(AddFloat64), nullInstr);
      break;
    case kS390_Sub32:
      // zero-ext
      if (CpuFeatures::IsSupported(DISTINCT_OPS)) {
        ASSEMBLE_BIN32_OP(RRRInstr(srk), RM32Instr(Sub32), RRIInstr(Sub32));
      } else {
        ASSEMBLE_BIN32_OP(RRInstr(sr), RM32Instr(Sub32), RIInstr(Sub32));
      }
      break;
    case kS390_Sub64:
      if (CpuFeatures::IsSupported(DISTINCT_OPS)) {
        ASSEMBLE_BIN_OP(RRRInstr(sgrk), RM64Instr(sg), RRIInstr(SubP));
      } else {
        ASSEMBLE_BIN_OP(RRInstr(sgr), RM64Instr(sg), RIInstr(SubP));
      }
      break;
    case kS390_SubFloat:
      ASSEMBLE_BIN_OP(DDInstr(sebr), DMTInstr(SubFloat32), nullInstr);
      break;
    case kS390_SubDouble:
      ASSEMBLE_BIN_OP(DDInstr(sdbr), DMTInstr(SubFloat64), nullInstr);
      break;
    case kS390_Mul32:
      // zero-ext
      if (CpuFeatures::IsSupported(MISC_INSTR_EXT2)) {
        ASSEMBLE_BIN32_OP(RRRInstr(msrkc), RM32Instr(msc), RIInstr(Mul32));
      } else {
        ASSEMBLE_BIN32_OP(RRInstr(Mul32), RM32Instr(Mul32), RIInstr(Mul32));
      }
      break;
    case kS390_Mul32WithOverflow:
      // zero-ext
      ASSEMBLE_BIN32_OP(RRRInstr(Mul32WithOverflowIfCCUnequal),
                        RRM32Instr(Mul32WithOverflowIfCCUnequal),
                        RRIInstr(Mul32WithOverflowIfCCUnequal));
      break;
    case kS390_Mul64:
      ASSEMBLE_BIN_OP(RRInstr(Mul64), RM64Instr(Mul64), RIInstr(Mul64));
      break;
    case kS390_MulHigh32:
      // zero-ext
      ASSEMBLE_BIN_OP(RRRInstr(MulHigh32), RRM32Instr(MulHigh32),
                      RRIInstr(MulHigh32));
      break;
    case kS390_MulHighU32:
      // zero-ext
      ASSEMBLE_BIN_OP(RRRInstr(MulHighU32), RRM32Instr(MulHighU32),
                      RRIInstr(MulHighU32));
      break;
    case kS390_MulFloat:
      ASSEMBLE_BIN_OP(DDInstr(meebr), DMTInstr(MulFloat32), nullInstr);
      break;
    case kS390_MulDouble:
      ASSEMBLE_BIN_OP(DDInstr(mdbr), DMTInstr(MulFloat64), nullInstr);
      break;
    case kS390_Div64:
      ASSEMBLE_BIN_OP(RRRInstr(Div64), RRM64Instr(Div64), nullInstr);
      break;
    case kS390_Div32:
      // zero-ext
      ASSEMBLE_BIN_OP(RRRInstr(Div32), RRM32Instr(Div32), nullInstr);
      break;
    case kS390_DivU64:
      ASSEMBLE_BIN_OP(RRRInstr(DivU64), RRM64Instr(DivU64), nullInstr);
      break;
    case kS390_DivU32:
      // zero-ext
      ASSEMBLE_BIN_OP(RRRInstr(DivU32), RRM32Instr(DivU32), nullInstr);
      break;
    case kS390_DivFloat:
      ASSEMBLE_BIN_OP(DDInstr(debr), DMTInstr(DivFloat32), nullInstr);
      break;
    case kS390_DivDouble:
      ASSEMBLE_BIN_OP(DDInstr(ddbr), DMTInstr(DivFloat64), nullInstr);
      break;
    case kS390_Mod32:
      // zero-ext
      ASSEMBLE_BIN_OP(RRRInstr(Mod32), RRM32Instr(Mod32), nullInstr);
      break;
    case kS390_ModU32:
      // zero-ext
      ASSEMBLE_BIN_OP(RRRInstr(ModU32), RRM32Instr(ModU32), nullInstr);
      break;
    case kS390_Mod64:
      ASSEMBLE_BIN_OP(RRRInstr(Mod64), RRM64Instr(Mod64), nullInstr);
      break;
    case kS390_ModU64:
      ASSEMBLE_BIN_OP(RRRInstr(ModU64), RRM64Instr(ModU64), nullInstr);
      break;
    case kS390_AbsFloat:
      __ lpebr(i.OutputDoubleRegister(), i.InputDoubleRegister(0));
      break;
    case kS390_SqrtFloat:
      ASSEMBLE_UNARY_OP(D_DInstr(sqebr), nullInstr, nullInstr);
      break;
    case kS390_SqrtDouble:
      ASSEMBLE_UNARY_OP(D_DInstr(sqdbr), nullInstr, nullInstr);
      break;
    case kS390_FloorFloat:
      __ fiebra(v8::internal::Assembler::FIDBRA_ROUND_TOWARD_NEG_INF,
                i.OutputDoubleRegister(), i.InputDoubleRegister(0));
      break;
    case kS390_CeilFloat:
      __ fiebra(v8::internal::Assembler::FIDBRA_ROUND_TOWARD_POS_INF,
                i.OutputDoubleRegister(), i.InputDoubleRegister(0));
      break;
    case kS390_TruncateFloat:
      __ fiebra(v8::internal::Assembler::FIDBRA_ROUND_TOWARD_0,
                i.OutputDoubleRegister(), i.InputDoubleRegister(0));
      break;
    //  Double operations
    case kS390_ModDouble:
      ASSEMBLE_FLOAT_MODULO();
      break;
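    // The kIeee754Float64* opcodes are implemented as calls into C library
    // routines through ExternalReference, with arguments and results moved
    // through the FP parameter/result registers (see ASSEMBLE_IEEE754_UNOP
    // and ASSEMBLE_IEEE754_BINOP above).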
    case kIeee754Float64Acos:
      ASSEMBLE_IEEE754_UNOP(acos);
      break;
    case kIeee754Float64Acosh:
      ASSEMBLE_IEEE754_UNOP(acosh);
      break;
    case kIeee754Float64Asin:
      ASSEMBLE_IEEE754_UNOP(asin);
      break;
    case kIeee754Float64Asinh:
      ASSEMBLE_IEEE754_UNOP(asinh);
      break;
    case kIeee754Float64Atanh:
      ASSEMBLE_IEEE754_UNOP(atanh);
      break;
    case kIeee754Float64Atan:
      ASSEMBLE_IEEE754_UNOP(atan);
      break;
    case kIeee754Float64Atan2:
      ASSEMBLE_IEEE754_BINOP(atan2);
      break;
    case kIeee754Float64Tan:
      ASSEMBLE_IEEE754_UNOP(tan);
      break;
    case kIeee754Float64Tanh:
      ASSEMBLE_IEEE754_UNOP(tanh);
      break;
    case kIeee754Float64Cbrt:
      ASSEMBLE_IEEE754_UNOP(cbrt);
      break;
    case kIeee754Float64Sin:
      ASSEMBLE_IEEE754_UNOP(sin);
      break;
    case kIeee754Float64Sinh:
      ASSEMBLE_IEEE754_UNOP(sinh);
      break;
    case kIeee754Float64Cos:
      ASSEMBLE_IEEE754_UNOP(cos);
      break;
    case kIeee754Float64Cosh:
      ASSEMBLE_IEEE754_UNOP(cosh);
      break;
    case kIeee754Float64Exp:
      ASSEMBLE_IEEE754_UNOP(exp);
      break;
    case kIeee754Float64Expm1:
      ASSEMBLE_IEEE754_UNOP(expm1);
      break;
    case kIeee754Float64Log:
      ASSEMBLE_IEEE754_UNOP(log);
      break;
    case kIeee754Float64Log1p:
      ASSEMBLE_IEEE754_UNOP(log1p);
      break;
    case kIeee754Float64Log2:
      ASSEMBLE_IEEE754_UNOP(log2);
      break;
    case kIeee754Float64Log10:
      ASSEMBLE_IEEE754_UNOP(log10);
      break;
    case kIeee754Float64Pow: {
      __ Call(BUILTIN_CODE(isolate(), MathPowInternal), RelocInfo::CODE_TARGET);
      break;
    }
2078 __ lcr(
i.OutputRegister(),
i.InputRegister(0));
2079 CHECK_AND_ZERO_EXT_OUTPUT(1);
2082 __ lcgr(
i.OutputRegister(),
i.InputRegister(0));
2084 case kS390_MaxFloat:
2085 ASSEMBLE_FLOAT_MAX();
2087 case kS390_MaxDouble:
2088 ASSEMBLE_DOUBLE_MAX();
2090 case kS390_MinFloat:
2091 ASSEMBLE_FLOAT_MIN();
2093 case kS390_MinDouble:
2094 ASSEMBLE_DOUBLE_MIN();
2096 case kS390_AbsDouble:
2097 __ lpdbr(
i.OutputDoubleRegister(),
i.InputDoubleRegister(0));
2099 case kS390_FloorDouble:
2100 __ fidbra(v8::internal::Assembler::FIDBRA_ROUND_TOWARD_NEG_INF,
2101 i.OutputDoubleRegister(),
i.InputDoubleRegister(0));
2103 case kS390_CeilDouble:
2104 __ fidbra(v8::internal::Assembler::FIDBRA_ROUND_TOWARD_POS_INF,
2105 i.OutputDoubleRegister(),
i.InputDoubleRegister(0));
2107 case kS390_TruncateDouble:
2108 __ fidbra(v8::internal::Assembler::FIDBRA_ROUND_TOWARD_0,
2109 i.OutputDoubleRegister(),
i.InputDoubleRegister(0));
2111 case kS390_RoundDouble:
2112 __ fidbra(v8::internal::Assembler::FIDBRA_ROUND_TO_NEAREST_AWAY_FROM_0,
2113 i.OutputDoubleRegister(),
i.InputDoubleRegister(0));
2115 case kS390_NegFloat:
2116 ASSEMBLE_UNARY_OP(D_DInstr(lcebr), nullInstr, nullInstr);
2118 case kS390_NegDouble:
2119 ASSEMBLE_UNARY_OP(D_DInstr(lcdbr), nullInstr, nullInstr);
2121 case kS390_Cntlz32: {
2122 __ llgfr(
i.OutputRegister(),
i.InputRegister(0));
2123 __ flogr(r0,
i.OutputRegister());
2124 __ Add32(
i.OutputRegister(), r0, Operand(-32));
2128 #if V8_TARGET_ARCH_S390X 2129 case kS390_Cntlz64: {
2130 __ flogr(r0,
i.InputRegister(0));
2131 __ LoadRR(
i.OutputRegister(), r0);
    case kS390_Popcnt32:
      __ Popcnt32(i.OutputRegister(), i.InputRegister(0));
      break;
#if V8_TARGET_ARCH_S390X
    case kS390_Popcnt64:
      __ Popcnt64(i.OutputRegister(), i.InputRegister(0));
      break;
#endif
    case kS390_Cmp32:
      ASSEMBLE_COMPARE32(Cmp32, CmpLogical32);
      break;
#if V8_TARGET_ARCH_S390X
    case kS390_Cmp64:
      ASSEMBLE_COMPARE(CmpP, CmpLogicalP);
      break;
#endif
    case kS390_CmpFloat:
      ASSEMBLE_FLOAT_COMPARE(cebr, ceb, ley);
      break;
    case kS390_CmpDouble:
      ASSEMBLE_FLOAT_COMPARE(cdbr, cdb, ldy);
      break;
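    // Test-under-mask: a mask that fits in 16 bits can be tested with tmll
    // directly; otherwise the AND result is materialized in r0 solely to set
    // the condition code.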
    case kS390_Tst32:
      if (HasRegisterInput(instr, 1)) {
        __ And(r0, i.InputRegister(0), i.InputRegister(1));
      } else {
        Operand opnd = i.InputImmediate(1);
        if (is_uint16(opnd.immediate())) {
          __ tmll(i.InputRegister(0), opnd);
        } else {
          __ lr(r0, i.InputRegister(0));
          __ nilf(r0, opnd);
        }
      }
      break;
    case kS390_Tst64:
      if (HasRegisterInput(instr, 1)) {
        __ AndP(r0, i.InputRegister(0), i.InputRegister(1));
      } else {
        Operand opnd = i.InputImmediate(1);
        if (is_uint16(opnd.immediate())) {
          __ tmll(i.InputRegister(0), opnd);
        } else {
          __ AndP(r0, i.InputRegister(0), opnd);
        }
      }
      break;
    case kS390_Float64SilenceNaN: {
      DoubleRegister value = i.InputDoubleRegister(0);
      DoubleRegister result = i.OutputDoubleRegister();
      __ CanonicalizeNaN(result, value);
      break;
    }
    case kS390_StackClaim: {
      int num_slots = i.InputInt32(0);
      __ lay(sp, MemOperand(sp, -num_slots * kPointerSize));
      frame_access_state()->IncreaseSPDelta(num_slots);
      break;
    }
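    // Pushing an FP value adjusts sp and stores explicitly (the Push()
    // helper covers GPRs); IncreaseSPDelta keeps the frame access state in
    // sync with the number of pointer-sized slots consumed.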
    case kS390_Push:
      if (instr->InputAt(0)->IsFPRegister()) {
        LocationOperand* op = LocationOperand::cast(instr->InputAt(0));
        if (op->representation() == MachineRepresentation::kFloat64) {
          __ lay(sp, MemOperand(sp, -kDoubleSize));
          __ StoreDouble(i.InputDoubleRegister(0), MemOperand(sp));
          frame_access_state()->IncreaseSPDelta(kDoubleSize / kPointerSize);
        } else {
          DCHECK_EQ(MachineRepresentation::kFloat32, op->representation());
          __ lay(sp, MemOperand(sp, -kPointerSize));
          __ StoreFloat32(i.InputDoubleRegister(0), MemOperand(sp));
          frame_access_state()->IncreaseSPDelta(1);
        }
      } else {
        __ Push(i.InputRegister(0));
        frame_access_state()->IncreaseSPDelta(1);
      }
      break;
    case kS390_PushFrame: {
      int num_slots = i.InputInt32(1);
      __ lay(sp, MemOperand(sp, -num_slots * kPointerSize));
      if (instr->InputAt(0)->IsFPRegister()) {
        LocationOperand* op = LocationOperand::cast(instr->InputAt(0));
        if (op->representation() == MachineRepresentation::kFloat64) {
          __ StoreDouble(i.InputDoubleRegister(0), MemOperand(sp));
        } else {
          DCHECK_EQ(MachineRepresentation::kFloat32, op->representation());
          __ StoreFloat32(i.InputDoubleRegister(0), MemOperand(sp));
        }
      } else {
        __ StoreP(i.InputRegister(0), MemOperand(sp));
      }
      break;
    }
    case kS390_StoreToStackSlot: {
      int slot = i.InputInt32(1);
      if (instr->InputAt(0)->IsFPRegister()) {
        LocationOperand* op = LocationOperand::cast(instr->InputAt(0));
        if (op->representation() == MachineRepresentation::kFloat64) {
          __ StoreDouble(i.InputDoubleRegister(0),
                         MemOperand(sp, slot * kPointerSize));
        } else {
          DCHECK_EQ(MachineRepresentation::kFloat32, op->representation());
          __ StoreFloat32(i.InputDoubleRegister(0),
                          MemOperand(sp, slot * kPointerSize));
        }
      } else {
        __ StoreP(i.InputRegister(0), MemOperand(sp, slot * kPointerSize));
      }
      break;
    }
    case kS390_SignExtendWord8ToInt32:
      __ lbr(i.OutputRegister(), i.InputRegister(0));
      CHECK_AND_ZERO_EXT_OUTPUT(1);
      break;
    case kS390_SignExtendWord16ToInt32:
      __ lhr(i.OutputRegister(), i.InputRegister(0));
      CHECK_AND_ZERO_EXT_OUTPUT(1);
      break;
    case kS390_SignExtendWord8ToInt64:
      __ lgbr(i.OutputRegister(), i.InputRegister(0));
      break;
    case kS390_SignExtendWord16ToInt64:
      __ lghr(i.OutputRegister(), i.InputRegister(0));
      break;
    case kS390_SignExtendWord32ToInt64:
      __ lgfr(i.OutputRegister(), i.InputRegister(0));
      break;
    case kS390_Uint32ToUint64:
      // Zero extend
      __ llgfr(i.OutputRegister(), i.InputRegister(0));
      break;
    case kS390_Int64ToInt32:
      // Sign extend
      __ lgfr(i.OutputRegister(), i.InputRegister(0));
      break;
    case kS390_Int64ToFloat32:
      __ ConvertInt64ToFloat(i.OutputDoubleRegister(), i.InputRegister(0));
      break;
    case kS390_Int64ToDouble:
      __ ConvertInt64ToDouble(i.OutputDoubleRegister(), i.InputRegister(0));
      break;
    case kS390_Uint64ToFloat32:
      __ ConvertUnsignedInt64ToFloat(i.OutputDoubleRegister(),
                                     i.InputRegister(0));
      break;
    case kS390_Uint64ToDouble:
      __ ConvertUnsignedInt64ToDouble(i.OutputDoubleRegister(),
                                      i.InputRegister(0));
      break;
    case kS390_Int32ToFloat32:
      __ ConvertIntToFloat(i.OutputDoubleRegister(), i.InputRegister(0));
      break;
    case kS390_Int32ToDouble:
      __ ConvertIntToDouble(i.OutputDoubleRegister(), i.InputRegister(0));
      break;
    case kS390_Uint32ToFloat32:
      __ ConvertUnsignedIntToFloat(i.OutputDoubleRegister(),
                                   i.InputRegister(0));
      break;
    case kS390_Uint32ToDouble:
      __ ConvertUnsignedIntToDouble(i.OutputDoubleRegister(),
                                    i.InputRegister(0));
      break;
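    // In the checked conversions below, branch mask 0xE takes the branch for
    // condition codes 0, 1 and 2; CC 3 (the special-case result, e.g. NaN or
    // out-of-range input) falls through so the output can be forced to zero.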
    case kS390_DoubleToInt32: {
      Label done;
      __ ConvertDoubleToInt32(i.OutputRegister(0), i.InputDoubleRegister(0),
                              kRoundToNearest);
      __ b(Condition(0xE), &done, Label::kNear);  // normal case
      __ lghi(i.OutputRegister(0), Operand::Zero());
      __ bind(&done);
      break;
    }
    case kS390_DoubleToUint32: {
      Label done;
      __ ConvertDoubleToUnsignedInt32(i.OutputRegister(0),
                                      i.InputDoubleRegister(0));
      __ b(Condition(0xE), &done, Label::kNear);  // normal case
      __ lghi(i.OutputRegister(0), Operand::Zero());
      __ bind(&done);
      break;
    }
    case kS390_DoubleToInt64: {
      Label done;
      if (i.OutputCount() > 1) {
        __ lghi(i.OutputRegister(1), Operand(1));
      }
      __ ConvertDoubleToInt64(i.OutputRegister(0), i.InputDoubleRegister(0));
      __ b(Condition(0xE), &done, Label::kNear);  // normal case
      if (i.OutputCount() > 1) {
        __ lghi(i.OutputRegister(1), Operand::Zero());
      } else {
        __ lghi(i.OutputRegister(0), Operand::Zero());
      }
      __ bind(&done);
      break;
    }
    case kS390_DoubleToUint64: {
      Label done;
      if (i.OutputCount() > 1) {
        __ lghi(i.OutputRegister(1), Operand(1));
      }
      __ ConvertDoubleToUnsignedInt64(i.OutputRegister(0),
                                      i.InputDoubleRegister(0));
      __ b(Condition(0xE), &done, Label::kNear);  // normal case
      if (i.OutputCount() > 1) {
        __ lghi(i.OutputRegister(1), Operand::Zero());
      } else {
        __ lghi(i.OutputRegister(0), Operand::Zero());
      }
      __ bind(&done);
      break;
    }
    case kS390_Float32ToInt32: {
      Label done;
      __ ConvertFloat32ToInt32(i.OutputRegister(0), i.InputDoubleRegister(0),
                               kRoundToZero);
      __ b(Condition(0xE), &done, Label::kNear);  // normal case
      __ lghi(i.OutputRegister(0), Operand::Zero());
      __ bind(&done);
      break;
    }
    case kS390_Float32ToUint32: {
      Label done;
      __ ConvertFloat32ToUnsignedInt32(i.OutputRegister(0),
                                       i.InputDoubleRegister(0));
      __ b(Condition(0xE), &done, Label::kNear);  // normal case
      __ lghi(i.OutputRegister(0), Operand::Zero());
      __ bind(&done);
      break;
    }
    case kS390_Float32ToUint64: {
      Label done;
      if (i.OutputCount() > 1) {
        __ lghi(i.OutputRegister(1), Operand(1));
      }
      __ ConvertFloat32ToUnsignedInt64(i.OutputRegister(0),
                                       i.InputDoubleRegister(0));
      __ b(Condition(0xE), &done, Label::kNear);  // normal case
      if (i.OutputCount() > 1) {
        __ lghi(i.OutputRegister(1), Operand::Zero());
      } else {
        __ lghi(i.OutputRegister(0), Operand::Zero());
      }
      __ bind(&done);
      break;
    }
    case kS390_Float32ToInt64: {
      Label done;
      if (i.OutputCount() > 1) {
        __ lghi(i.OutputRegister(1), Operand(1));
      }
      __ ConvertFloat32ToInt64(i.OutputRegister(0), i.InputDoubleRegister(0));
      __ b(Condition(0xE), &done, Label::kNear);  // normal case
      if (i.OutputCount() > 1) {
        __ lghi(i.OutputRegister(1), Operand::Zero());
      } else {
        __ lghi(i.OutputRegister(0), Operand::Zero());
      }
      __ bind(&done);
      break;
    }
    case kS390_DoubleToFloat32:
      ASSEMBLE_UNARY_OP(D_DInstr(ledbr), nullInstr, nullInstr);
      break;
    case kS390_Float32ToDouble:
      ASSEMBLE_UNARY_OP(D_DInstr(ldebr), D_MTInstr(LoadFloat32ToDouble),
                        nullInstr);
      break;
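    // lgdr/ldgr move raw bits between an FP register and a GPR, so the
    // word32 extract/insert/construct cases below are pure bit shuffling,
    // not value conversions.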
    case kS390_DoubleExtractLowWord32:
      __ lgdr(i.OutputRegister(), i.InputDoubleRegister(0));
      __ llgfr(i.OutputRegister(), i.OutputRegister());
      break;
    case kS390_DoubleExtractHighWord32:
      __ lgdr(i.OutputRegister(), i.InputDoubleRegister(0));
      __ srlg(i.OutputRegister(), i.OutputRegister(), Operand(32));
      break;
    case kS390_DoubleInsertLowWord32:
      __ lgdr(kScratchReg, i.InputDoubleRegister(0));
      __ lr(kScratchReg, i.InputRegister(1));
      __ ldgr(i.OutputDoubleRegister(), kScratchReg);
      break;
    case kS390_DoubleInsertHighWord32:
      __ sllg(kScratchReg, i.InputRegister(1), Operand(32));
      __ lgdr(r0, i.InputDoubleRegister(0));
      __ lr(kScratchReg, r0);
      __ ldgr(i.OutputDoubleRegister(), kScratchReg);
      break;
    case kS390_DoubleConstruct:
      __ sllg(kScratchReg, i.InputRegister(0), Operand(32));
      __ lr(kScratchReg, i.InputRegister(1));
      __ ldgr(i.OutputDoubleRegister(), kScratchReg);
      break;
    case kS390_LoadWordS8:
      ASSEMBLE_LOAD_INTEGER(LoadB);
      EmitWordLoadPoisoningIfNeeded(this, instr, i);
      break;
    case kS390_BitcastFloat32ToInt32:
      ASSEMBLE_UNARY_OP(R_DInstr(MovFloatToInt), R_MInstr(LoadlW), nullInstr);
      break;
    case kS390_BitcastInt32ToFloat32:
      __ MovIntToFloat(i.OutputDoubleRegister(), i.InputRegister(0));
      break;
#if V8_TARGET_ARCH_S390X
    case kS390_BitcastDoubleToInt64:
      __ MovDoubleToInt64(i.OutputRegister(), i.InputDoubleRegister(0));
      break;
    case kS390_BitcastInt64ToDouble:
      __ MovInt64ToDouble(i.OutputDoubleRegister(), i.InputRegister(0));
      break;
#endif
    case kS390_LoadWordU8:
      ASSEMBLE_LOAD_INTEGER(LoadlB);
      EmitWordLoadPoisoningIfNeeded(this, instr, i);
      break;
    case kS390_LoadWordU16:
      ASSEMBLE_LOAD_INTEGER(LoadLogicalHalfWordP);
      EmitWordLoadPoisoningIfNeeded(this, instr, i);
      break;
    case kS390_LoadWordS16:
      ASSEMBLE_LOAD_INTEGER(LoadHalfWordP);
      EmitWordLoadPoisoningIfNeeded(this, instr, i);
      break;
    case kS390_LoadWordU32:
      ASSEMBLE_LOAD_INTEGER(LoadlW);
      EmitWordLoadPoisoningIfNeeded(this, instr, i);
      break;
    case kS390_LoadWordS32:
      ASSEMBLE_LOAD_INTEGER(LoadW);
      EmitWordLoadPoisoningIfNeeded(this, instr, i);
      break;
    case kS390_LoadReverse16:
      ASSEMBLE_LOAD_INTEGER(lrvh);
      EmitWordLoadPoisoningIfNeeded(this, instr, i);
      break;
    case kS390_LoadReverse32:
      ASSEMBLE_LOAD_INTEGER(lrv);
      EmitWordLoadPoisoningIfNeeded(this, instr, i);
      break;
    case kS390_LoadReverse64:
      ASSEMBLE_LOAD_INTEGER(lrvg);
      EmitWordLoadPoisoningIfNeeded(this, instr, i);
      break;
    case kS390_LoadReverse16RR:
      __ lrvr(i.OutputRegister(), i.InputRegister(0));
      __ rll(i.OutputRegister(), i.OutputRegister(), Operand(16));
      break;
    case kS390_LoadReverse32RR:
      __ lrvr(i.OutputRegister(), i.InputRegister(0));
      break;
    case kS390_LoadReverse64RR:
      __ lrvgr(i.OutputRegister(), i.InputRegister(0));
      break;
    case kS390_LoadWord64:
      ASSEMBLE_LOAD_INTEGER(lg);
      EmitWordLoadPoisoningIfNeeded(this, instr, i);
      break;
    case kS390_LoadAndTestWord32: {
      ASSEMBLE_LOADANDTEST32(ltr, lt_z);
      break;
    }
    case kS390_LoadAndTestWord64: {
      ASSEMBLE_LOADANDTEST64(ltgr, ltg);
      break;
    }
    case kS390_LoadFloat32:
      ASSEMBLE_LOAD_FLOAT(LoadFloat32);
      break;
    case kS390_LoadDouble:
      ASSEMBLE_LOAD_FLOAT(LoadDouble);
      break;
    case kS390_StoreWord8:
      ASSEMBLE_STORE_INTEGER(StoreByte);
      break;
    case kS390_StoreWord16:
      ASSEMBLE_STORE_INTEGER(StoreHalfWord);
      break;
    case kS390_StoreWord32:
      ASSEMBLE_STORE_INTEGER(StoreW);
      break;
#if V8_TARGET_ARCH_S390X
    case kS390_StoreWord64:
      ASSEMBLE_STORE_INTEGER(StoreP);
      break;
#endif
    case kS390_StoreReverse16:
      ASSEMBLE_STORE_INTEGER(strvh);
      break;
    case kS390_StoreReverse32:
      ASSEMBLE_STORE_INTEGER(strv);
      break;
    case kS390_StoreReverse64:
      ASSEMBLE_STORE_INTEGER(strvg);
      break;
    case kS390_StoreFloat32:
      ASSEMBLE_STORE_FLOAT32();
      break;
    case kS390_StoreDouble:
      ASSEMBLE_STORE_DOUBLE();
      break;
    case kS390_Lay:
      __ lay(i.OutputRegister(), i.MemoryOperand());
      break;
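// The atomic exchange helpers below implement a compare-and-swap loop: the
// word containing the target byte/halfword is loaded, the new value is
// spliced in with RotateInsertSelectBits, and csy retries until no other
// writer has intervened.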
#define ATOMIC_EXCHANGE(start, end, shift_amount, offset)              \
  {                                                                    \
    Label do_cs;                                                       \
    __ LoadlW(output, MemOperand(r1, offset));                         \
    __ bind(&do_cs);                                                   \
    __ llgfr(r0, output);                                              \
    __ RotateInsertSelectBits(r0, value, Operand(start), Operand(end), \
                              Operand(shift_amount), false);           \
    __ csy(output, r0, MemOperand(r1, offset));                        \
    __ bne(&do_cs, Label::kNear);                                      \
    __ srl(output, Operand(shift_amount));                             \
  }
#ifdef V8_TARGET_BIG_ENDIAN
#define ATOMIC_EXCHANGE_BYTE(i)                                  \
  {                                                              \
    constexpr int idx = (i);                                     \
    static_assert(idx <= 3 && idx >= 0, "idx is out of range!"); \
    constexpr int start = 32 + 8 * idx;                          \
    constexpr int end = start + 7;                               \
    constexpr int shift_amount = (3 - idx) * 8;                  \
    ATOMIC_EXCHANGE(start, end, shift_amount, -idx);             \
  }
#define ATOMIC_EXCHANGE_HALFWORD(i)                              \
  {                                                              \
    constexpr int idx = (i);                                     \
    static_assert(idx <= 1 && idx >= 0, "idx is out of range!"); \
    constexpr int start = 32 + 16 * idx;                         \
    constexpr int end = start + 15;                              \
    constexpr int shift_amount = (1 - idx) * 16;                 \
    ATOMIC_EXCHANGE(start, end, shift_amount, -idx * 2);         \
  }
#else
#define ATOMIC_EXCHANGE_BYTE(i)                                  \
  {                                                              \
    constexpr int idx = (i);                                     \
    static_assert(idx <= 3 && idx >= 0, "idx is out of range!"); \
    constexpr int start = 32 + 8 * (3 - idx);                    \
    constexpr int end = start + 7;                               \
    constexpr int shift_amount = idx * 8;                        \
    ATOMIC_EXCHANGE(start, end, shift_amount, -idx);             \
  }
#define ATOMIC_EXCHANGE_HALFWORD(i)                              \
  {                                                              \
    constexpr int idx = (i);                                     \
    static_assert(idx <= 1 && idx >= 0, "idx is out of range!"); \
    constexpr int start = 32 + 16 * (1 - idx);                   \
    constexpr int end = start + 15;                              \
    constexpr int shift_amount = idx * 16;                       \
    ATOMIC_EXCHANGE(start, end, shift_amount, -idx * 2);         \
  }
#endif
    case kS390_Word64AtomicExchangeUint8:
    case kWord32AtomicExchangeInt8:
    case kWord32AtomicExchangeUint8: {
      Register base = i.InputRegister(0);
      Register index = i.InputRegister(1);
      Register value = i.InputRegister(2);
      Register output = i.OutputRegister();
      Label three, two, one, done;
      __ la(r1, MemOperand(base, index));
      __ tmll(r1, Operand(3));
      __ b(Condition(1), &three);
      __ b(Condition(2), &two);
      __ b(Condition(4), &one);

      // Address ends in 0b00.
      ATOMIC_EXCHANGE_BYTE(0);
      __ b(&done);

      // Address ends in 0b01.
      __ bind(&one);
      ATOMIC_EXCHANGE_BYTE(1);
      __ b(&done);

      // Address ends in 0b10.
      __ bind(&two);
      ATOMIC_EXCHANGE_BYTE(2);
      __ b(&done);

      // Address ends in 0b11.
      __ bind(&three);
      ATOMIC_EXCHANGE_BYTE(3);

      __ bind(&done);
      if (opcode == kWord32AtomicExchangeInt8) {
        __ lgbr(output, output);
      } else {
        __ llgcr(output, output);
      }
      break;
    }
    case kS390_Word64AtomicExchangeUint16:
    case kWord32AtomicExchangeInt16:
    case kWord32AtomicExchangeUint16: {
      Register base = i.InputRegister(0);
      Register index = i.InputRegister(1);
      Register value = i.InputRegister(2);
      Register output = i.OutputRegister();
      Label two, unaligned, done;
      __ la(r1, MemOperand(base, index));
      __ tmll(r1, Operand(3));
      __ b(Condition(2), &two);

      // Address ends in 0b00.
      ATOMIC_EXCHANGE_HALFWORD(0);
      __ b(&done);

      // Address ends in 0b10.
      __ bind(&two);
      ATOMIC_EXCHANGE_HALFWORD(1);

      __ bind(&done);
      if (opcode == kWord32AtomicExchangeInt16) {
        __ lghr(output, output);
      } else {
        __ llghr(output, output);
      }
      break;
    }
    case kS390_Word64AtomicExchangeUint32:
    case kWord32AtomicExchangeWord32: {
      Register base = i.InputRegister(0);
      Register index = i.InputRegister(1);
      Register value = i.InputRegister(2);
      Register output = i.OutputRegister();
      Label do_cs;
      __ lay(r1, MemOperand(base, index));
      __ LoadlW(output, MemOperand(r1));
      __ bind(&do_cs);
      __ cs(output, value, MemOperand(r1));
      __ bne(&do_cs, Label::kNear);
      break;
    }
    case kWord32AtomicCompareExchangeInt8:
      ASSEMBLE_ATOMIC_COMPARE_EXCHANGE_BYTE(LoadB);
      break;
    case kS390_Word64AtomicCompareExchangeUint8:
    case kWord32AtomicCompareExchangeUint8:
      ASSEMBLE_ATOMIC_COMPARE_EXCHANGE_BYTE(LoadlB);
      break;
    case kWord32AtomicCompareExchangeInt16:
      ASSEMBLE_ATOMIC_COMPARE_EXCHANGE_HALFWORD(LoadHalfWordP);
      break;
    case kS390_Word64AtomicCompareExchangeUint16:
    case kWord32AtomicCompareExchangeUint16:
      ASSEMBLE_ATOMIC_COMPARE_EXCHANGE_HALFWORD(LoadLogicalHalfWordP);
      break;
    case kS390_Word64AtomicCompareExchangeUint32:
    case kWord32AtomicCompareExchangeWord32:
      ASSEMBLE_ATOMIC_COMPARE_EXCHANGE_WORD();
      break;
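// ATOMIC_BINOP_CASE stamps out the sub-word atomic read-modify-write cases.
// The names `prev`, `result`, and `shift_amount` used in the lambdas are
// bound by the ASSEMBLE_ATOMIC_BINOP_BYTE/HALFWORD helper macros (assumed to
// be defined earlier in this file); each lambda extracts the signed or
// unsigned sub-word value from the containing word.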
#define ATOMIC_BINOP_CASE(op, inst)                                          \
  case kWord32Atomic##op##Int8:                                              \
    ASSEMBLE_ATOMIC_BINOP_BYTE(inst, [&]() {                                 \
      intptr_t shift_right = static_cast<intptr_t>(shift_amount);            \
      __ srlk(result, prev, Operand(shift_right));                           \
      __ LoadB(result, result);                                              \
    });                                                                      \
    break;                                                                   \
  case kS390_Word64Atomic##op##Uint8:                                        \
  case kWord32Atomic##op##Uint8:                                             \
    ASSEMBLE_ATOMIC_BINOP_BYTE(inst, [&]() {                                 \
      int rotate_left = shift_amount == 0 ? 0 : 64 - shift_amount;           \
      __ RotateInsertSelectBits(result, prev, Operand(56), Operand(63),      \
                                Operand(static_cast<intptr_t>(rotate_left)), \
                                true);                                       \
    });                                                                      \
    break;                                                                   \
  case kWord32Atomic##op##Int16:                                             \
    ASSEMBLE_ATOMIC_BINOP_HALFWORD(inst, [&]() {                             \
      intptr_t shift_right = static_cast<intptr_t>(shift_amount);            \
      __ srlk(result, prev, Operand(shift_right));                           \
      __ LoadHalfWordP(result, result);                                      \
    });                                                                      \
    break;                                                                   \
  case kS390_Word64Atomic##op##Uint16:                                       \
  case kWord32Atomic##op##Uint16:                                            \
    ASSEMBLE_ATOMIC_BINOP_HALFWORD(inst, [&]() {                             \
      int rotate_left = shift_amount == 0 ? 0 : 64 - shift_amount;           \
      __ RotateInsertSelectBits(result, prev, Operand(48), Operand(63),      \
                                Operand(static_cast<intptr_t>(rotate_left)), \
                                true);                                       \
    });                                                                      \
    break;
      ATOMIC_BINOP_CASE(Add, Add32)
      ATOMIC_BINOP_CASE(Sub, Sub32)
      ATOMIC_BINOP_CASE(And, And)
      ATOMIC_BINOP_CASE(Or, Or)
      ATOMIC_BINOP_CASE(Xor, Xor)
#undef ATOMIC_BINOP_CASE
    case kS390_Word64AtomicAddUint32:
    case kWord32AtomicAddWord32:
      ASSEMBLE_ATOMIC_BINOP_WORD(laa);
      break;
    case kS390_Word64AtomicSubUint32:
    case kWord32AtomicSubWord32:
      ASSEMBLE_ATOMIC_BINOP_WORD(LoadAndSub32);
      break;
    case kS390_Word64AtomicAndUint32:
    case kWord32AtomicAndWord32:
      ASSEMBLE_ATOMIC_BINOP_WORD(lan);
      break;
    case kS390_Word64AtomicOrUint32:
    case kWord32AtomicOrWord32:
      ASSEMBLE_ATOMIC_BINOP_WORD(lao);
      break;
    case kS390_Word64AtomicXorUint32:
    case kWord32AtomicXorWord32:
      ASSEMBLE_ATOMIC_BINOP_WORD(lax);
      break;
    case kS390_Word64AtomicAddUint64:
      ASSEMBLE_ATOMIC_BINOP_WORD64(laag);
      break;
    case kS390_Word64AtomicSubUint64:
      ASSEMBLE_ATOMIC_BINOP_WORD64(LoadAndSub64);
      break;
    case kS390_Word64AtomicAndUint64:
      ASSEMBLE_ATOMIC_BINOP_WORD64(lang);
      break;
    case kS390_Word64AtomicOrUint64:
      ASSEMBLE_ATOMIC_BINOP_WORD64(laog);
      break;
    case kS390_Word64AtomicXorUint64:
      ASSEMBLE_ATOMIC_BINOP_WORD64(laxg);
      break;
    case kS390_Word64AtomicExchangeUint64: {
      Register base = i.InputRegister(0);
      Register index = i.InputRegister(1);
      Register value = i.InputRegister(2);
      Register output = i.OutputRegister();
      Label do_cs;
      __ la(r1, MemOperand(base, index));
      __ lg(output, MemOperand(r1));
      __ bind(&do_cs);
      __ csg(output, value, MemOperand(r1));
      __ bne(&do_cs, Label::kNear);
      break;
    }
    case kS390_Word64AtomicCompareExchangeUint64:
      ASSEMBLE_ATOMIC64_COMP_EXCHANGE_WORD64();
      break;
    default:
      UNREACHABLE();
      break;
  }
  return kSuccess;
}  // NOLINT(readability/fn_size)
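// Assembles branches after an instruction.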
void CodeGenerator::AssembleArchBranch(Instruction* instr, BranchInfo* branch) {
  S390OperandConverter i(this, instr);
  Label* tlabel = branch->true_label;
  Label* flabel = branch->false_label;
  ArchOpcode op = instr->arch_opcode();
  FlagsCondition condition = branch->condition;

  Condition cond = FlagsConditionToCondition(condition, op);
  if (op == kS390_CmpFloat || op == kS390_CmpDouble) {
    // An unordered comparison (at least one NaN operand) must take the branch
    // the tests expect: false for le/eq/lt, true for gt/ne/ge.
    if (cond == le || cond == eq || cond == lt) {
      __ bunordered(flabel);
    } else if (cond == gt || cond == ne || cond == ge) {
      __ bunordered(tlabel);
    }
  }
  __ b(cond, tlabel);
  if (!branch->fallthru) __ b(flabel);  // no fallthru to flabel.
}
void CodeGenerator::AssembleBranchPoisoning(FlagsCondition condition,
                                            Instruction* instr) {
  if (condition == kUnorderedEqual || condition == kUnorderedNotEqual ||
      condition == kOverflow || condition == kNotOverflow) {
    return;
  }

  condition = NegateFlagsCondition(condition);
  __ LoadImmP(r0, Operand::Zero());
  __ LoadOnConditionP(FlagsConditionToCondition(condition, kArchNop),
                      kSpeculationPoisonRegister, r0);
}
void CodeGenerator::AssembleArchDeoptBranch(Instruction* instr,
                                            BranchInfo* branch) {
  AssembleArchBranch(instr, branch);
}

void CodeGenerator::AssembleArchJump(RpoNumber target) {
  if (!IsNextInAssemblyOrder(target)) __ b(GetLabel(target));
}
void CodeGenerator::AssembleArchTrap(Instruction* instr,
                                     FlagsCondition condition) {
  class OutOfLineTrap final : public OutOfLineCode {
   public:
    OutOfLineTrap(CodeGenerator* gen, Instruction* instr)
        : OutOfLineCode(gen), instr_(instr), gen_(gen) {}

    void Generate() final {
      S390OperandConverter i(gen_, instr_);
      TrapId trap_id =
          static_cast<TrapId>(i.InputInt32(instr_->InputCount() - 1));
      GenerateCallToTrap(trap_id);
    }

   private:
    void GenerateCallToTrap(TrapId trap_id) {
      if (trap_id == TrapId::kInvalid) {
        // We cannot test calls to the runtime in cctest/test-run-wasm.
        // Therefore we emit a call to C here instead of a call to the runtime.
        // We use the context register as the scratch register, because we do
        // not have a context here.
        __ PrepareCallCFunction(0, 0, cp);
        __ CallCFunction(
            ExternalReference::wasm_call_trap_callback_for_testing(), 0);
        __ LeaveFrame(StackFrame::WASM_COMPILED);
        auto call_descriptor = gen_->linkage()->GetIncomingDescriptor();
        int pop_count =
            static_cast<int>(call_descriptor->StackParameterCount());
        __ Drop(pop_count);
        __ Ret();
      } else {
        gen_->AssembleSourcePosition(instr_);
        // A direct call to a wasm runtime stub defined in this module.
        // Just encode the stub index. This will be patched when the code
        // is added to the native module and copied into wasm code space.
        __ Call(static_cast<Address>(trap_id), RelocInfo::WASM_STUB_CALL);
        ReferenceMap* reference_map =
            new (gen_->zone()) ReferenceMap(gen_->zone());
        gen_->RecordSafepoint(reference_map, Safepoint::kSimple, 0,
                              Safepoint::kNoLazyDeopt);
        if (FLAG_debug_code) {
          __ stop(GetAbortReason(AbortReason::kUnexpectedReturnFromWasmTrap));
        }
      }
    }

    Instruction* instr_;
    CodeGenerator* gen_;
  };
  auto ool = new (zone()) OutOfLineTrap(this, instr);
  Label* tlabel = ool->entry();
  Label end;

  ArchOpcode op = instr->arch_opcode();
  Condition cond = FlagsConditionToCondition(condition, op);
  if (op == kS390_CmpFloat || op == kS390_CmpDouble) {
    // Unordered float comparisons never trap for le/eq/lt and always trap
    // for gt/ne/ge.
    if (cond == le || cond == eq || cond == lt) {
      __ bunordered(&end);
    } else if (cond == gt || cond == ne || cond == ge) {
      __ bunordered(tlabel);
    }
  }
  __ b(cond, tlabel);
  __ bind(&end);
}
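// Assembles boolean materializations after an instruction.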
void CodeGenerator::AssembleArchBoolean(Instruction* instr,
                                        FlagsCondition condition) {
  S390OperandConverter i(this, instr);
  ArchOpcode op = instr->arch_opcode();
  bool check_unordered = (op == kS390_CmpDouble || op == kS390_CmpFloat);

  // Overflow checked for add/sub only.
  DCHECK((condition != kOverflow && condition != kNotOverflow) ||
         (op == kS390_Add32 || op == kS390_Add64 || op == kS390_Sub32 ||
          op == kS390_Sub64 || op == kS390_Mul32));

  // Materialize a full 32-bit 1 or 0 value. The result register is always the
  // last output of the instruction.
  DCHECK_NE(0u, instr->OutputCount());
  Register reg = i.OutputRegister(instr->OutputCount() - 1);
  Condition cond = FlagsConditionToCondition(condition, op);
  Label done;
  if (check_unordered) {
    __ LoadImmP(reg, (cond == eq || cond == le || cond == lt) ? Operand::Zero()
                                                              : Operand(1));
    __ bunordered(&done);
  }

  __ LoadImmP(reg, Operand::Zero());
  __ LoadImmP(kScratchReg, Operand(1));
  // locr is sufficient since reg's upper 32 bits are guaranteed to be zero.
  __ locr(cond, reg, kScratchReg);
  __ bind(&done);
}
void CodeGenerator::AssembleArchBinarySearchSwitch(Instruction* instr) {
  S390OperandConverter i(this, instr);
  Register input = i.InputRegister(0);
  std::vector<std::pair<int32_t, Label*>> cases;
  for (size_t index = 2; index < instr->InputCount(); index += 2) {
    cases.push_back({i.InputInt32(index + 0), GetLabel(i.InputRpo(index + 1))});
  }
  AssembleArchBinarySearchSwitchRange(input, i.InputRpo(1), cases.data(),
                                      cases.data() + cases.size());
}
void CodeGenerator::AssembleArchLookupSwitch(Instruction* instr) {
  S390OperandConverter i(this, instr);
  Register input = i.InputRegister(0);
  for (size_t index = 2; index < instr->InputCount(); index += 2) {
    __ Cmp32(input, Operand(i.InputInt32(index + 0)));
    __ beq(GetLabel(i.InputRpo(index + 1)));
  }
  AssembleArchJump(i.InputRpo(1));
}
void CodeGenerator::AssembleArchTableSwitch(Instruction* instr) {
  S390OperandConverter i(this, instr);
  Register input = i.InputRegister(0);
  int32_t const case_count = static_cast<int32_t>(instr->InputCount() - 2);
  Label** cases = zone()->NewArray<Label*>(case_count);
  for (int32_t index = 0; index < case_count; ++index) {
    cases[index] = GetLabel(i.InputRpo(index + 2));
  }
  Label* const table = AddJumpTable(cases, case_count);
  __ CmpLogicalP(input, Operand(case_count));
  __ bge(GetLabel(i.InputRpo(1)));  // Out-of-range indices take the default.
  __ larl(kScratchReg, table);
  __ ShiftLeftP(r1, input, Operand(kPointerSizeLog2));
  __ LoadP(kScratchReg, MemOperand(kScratchReg, r1));
  __ Jump(kScratchReg);
}
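// FinishFrame only reserves frame slots for the callee-saved GP/FP registers
// the call descriptor requires; the actual pushes are emitted later, in
// AssembleConstructFrame.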
void CodeGenerator::FinishFrame(Frame* frame) {
  auto call_descriptor = linkage()->GetIncomingDescriptor();
  const RegList double_saves = call_descriptor->CalleeSavedFPRegisters();

  // Save callee-saved Double registers.
  if (double_saves != 0) {
    frame->AlignSavedCalleeRegisterSlots();
    DCHECK_EQ(kNumCalleeSavedDoubles,
              base::bits::CountPopulation(double_saves));
    frame->AllocateSavedCalleeRegisterSlots(kNumCalleeSavedDoubles *
                                            (kDoubleSize / kPointerSize));
  }
  // Save callee-saved registers.
  const RegList saves = call_descriptor->CalleeSavedRegisters();
  if (saves != 0) {
    // The register save area does not include the fp.
    const int num_saves = kNumCalleeSaved - 1;
    DCHECK(num_saves == base::bits::CountPopulation(saves));
    frame->AllocateSavedCalleeRegisterSlots(num_saves);
  }
}
void CodeGenerator::AssembleConstructFrame() {
  auto call_descriptor = linkage()->GetIncomingDescriptor();

  if (frame_access_state()->has_frame()) {
    if (call_descriptor->IsCFunctionCall()) {
      __ Push(r14, fp);
      __ LoadRR(fp, sp);
    } else if (call_descriptor->IsJSFunctionCall()) {
      __ Prologue(ip);
      if (call_descriptor->PushArgumentCount()) {
        __ Push(kJavaScriptCallArgCountRegister);
      }
    } else {
      StackFrame::Type type = info()->GetOutputStackFrameType();
      __ StubPrologue(type);
      if (call_descriptor->IsWasmFunctionCall()) {
        __ Push(kWasmInstanceRegister);
      } else if (call_descriptor->IsWasmImportWrapper()) {
        // WASM import wrappers are passed a tuple in the place of the
        // instance. Unpack the tuple into the instance and the target
        // callable. This must be done here in the codegen because it cannot
        // be expressed properly in the graph.
        __ LoadP(kJSFunctionRegister,
                 FieldMemOperand(kWasmInstanceRegister, Tuple2::kValue2Offset));
        __ LoadP(kWasmInstanceRegister,
                 FieldMemOperand(kWasmInstanceRegister, Tuple2::kValue1Offset));
        __ Push(kWasmInstanceRegister);
      }
    }
  }

  int shrink_slots = frame()->GetTotalFrameSlotCount() -
                     call_descriptor->CalculateFixedFrameSize();
  if (info()->is_osr()) {
    // TurboFan OSR-compiled functions cannot be entered directly.
    __ Abort(AbortReason::kShouldNotDirectlyEnterOsrFunction);

    // Unoptimized code jumps directly to this entrypoint while the
    // unoptimized frame is still on the stack. Optimized code uses OSR values
    // directly from the unoptimized frame. Thus, all that needs to be done is
    // to allocate the remaining stack slots.
    if (FLAG_code_comments) __ RecordComment("-- OSR entrypoint --");
    osr_pc_offset_ = __ pc_offset();
    shrink_slots -= osr_helper()->UnoptimizedFrameSlots();
    ResetSpeculationPoison();
  }

  const RegList saves_fp = call_descriptor->CalleeSavedFPRegisters();
  const RegList saves = call_descriptor->CalleeSavedRegisters();

  if (shrink_slots > 0) {
    if (info()->IsWasm() && shrink_slots > 128) {
      // For WebAssembly functions with big frames we have to do the stack
      // overflow check before we construct the frame. Otherwise we may not
      // have enough space on the stack to call the runtime for the stack
      // overflow.
      Label done;

      // If the frame is bigger than the stack, we throw the stack overflow
      // exception unconditionally. Thereby we can avoid the integer overflow
      // check in the condition code.
      if ((shrink_slots * kPointerSize) < (FLAG_stack_size * 1024)) {
        Register scratch = r1;
        __ LoadP(scratch,
                 FieldMemOperand(kWasmInstanceRegister,
                                 WasmInstanceObject::kRealStackLimitAddressOffset));
        __ LoadP(scratch, MemOperand(scratch));
        __ AddP(scratch, scratch, Operand(shrink_slots * kPointerSize));
        __ CmpLogicalP(sp, scratch);
        __ bge(&done);
      }

      __ LoadP(r4, FieldMemOperand(kWasmInstanceRegister,
                                   WasmInstanceObject::kCEntryStubOffset));
      __ Move(cp, Smi::zero());
      __ CallRuntimeWithCEntry(Runtime::kThrowWasmStackOverflow, r4);
      // We come from WebAssembly, there are no references for the GC.
      ReferenceMap* reference_map = new (zone()) ReferenceMap(zone());
      RecordSafepoint(reference_map, Safepoint::kSimple, 0,
                      Safepoint::kNoLazyDeopt);
      if (FLAG_debug_code) {
        __ stop(GetAbortReason(AbortReason::kUnexpectedReturnFromThrow));
      }

      __ bind(&done);
    }

    // Skip the callee-saved and return slots, which are created below.
    shrink_slots -= base::bits::CountPopulation(saves);
    shrink_slots -= frame()->GetReturnSlotCount();
    shrink_slots -=
        (kDoubleSize / kPointerSize) * base::bits::CountPopulation(saves_fp);
    __ lay(sp, MemOperand(sp, -shrink_slots * kPointerSize));
  }

  // Save callee-saved Double registers.
  if (saves_fp != 0) {
    __ MultiPushDoubles(saves_fp);
    DCHECK_EQ(kNumCalleeSavedDoubles, base::bits::CountPopulation(saves_fp));
  }

  // Save callee-saved registers.
  if (saves != 0) {
    __ MultiPush(saves);
    // The register save area does not include the fp.
  }

  const int returns = frame()->GetReturnSlotCount();
  if (returns != 0) {
    // Create space for returns.
    __ lay(sp, MemOperand(sp, -returns * kPointerSize));
  }
}
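// AssembleReturn tears the frame down in the reverse order of
// AssembleConstructFrame: return slots are freed first, then callee-saved
// GPRs and FP registers are restored, and finally the frame itself is
// deconstructed.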
void CodeGenerator::AssembleReturn(InstructionOperand* pop) {
  auto call_descriptor = linkage()->GetIncomingDescriptor();
  int pop_count = static_cast<int>(call_descriptor->StackParameterCount());

  const int returns = frame()->GetReturnSlotCount();
  if (returns != 0) {
    // Free up the return slots.
    __ lay(sp, MemOperand(sp, returns * kPointerSize));
  }

  // Restore registers.
  const RegList saves = call_descriptor->CalleeSavedRegisters();
  if (saves != 0) {
    __ MultiPop(saves);
  }

  // Restore double registers.
  const RegList double_saves = call_descriptor->CalleeSavedFPRegisters();
  if (double_saves != 0) {
    __ MultiPopDoubles(double_saves);
  }

  S390OperandConverter g(this, nullptr);
  if (call_descriptor->IsCFunctionCall()) {
    AssembleDeconstructFrame();
  } else if (frame_access_state()->has_frame()) {
    // Canonicalize JSFunction return sites for now unless they have a
    // variable number of stack slot pops.
    if (pop->IsImmediate() && g.ToConstant(pop).ToInt32() == 0) {
      if (return_label_.is_bound()) {
        __ b(&return_label_);
        return;
      } else {
        __ bind(&return_label_);
        AssembleDeconstructFrame();
      }
    } else {
      AssembleDeconstructFrame();
    }
  }
  if (pop->IsImmediate()) {
    pop_count += g.ToConstant(pop).ToInt32();
  } else {
    __ Drop(g.ToRegister(pop));
  }
  __ Drop(pop_count);
  __ Ret();
}

void CodeGenerator::FinishCode() {}
void CodeGenerator::AssembleMove(InstructionOperand* source,
                                 InstructionOperand* destination) {
  S390OperandConverter g(this, nullptr);
  // Dispatch on the source and destination operand kinds. Not all
  // combinations are possible.
  if (source->IsRegister()) {
    DCHECK(destination->IsRegister() || destination->IsStackSlot());
    Register src = g.ToRegister(source);
    if (destination->IsRegister()) {
      __ Move(g.ToRegister(destination), src);
    } else {
      __ StoreP(src, g.ToMemOperand(destination));
    }
  } else if (source->IsStackSlot()) {
    DCHECK(destination->IsRegister() || destination->IsStackSlot());
    MemOperand src = g.ToMemOperand(source);
    if (destination->IsRegister()) {
      __ LoadP(g.ToRegister(destination), src);
    } else {
      Register temp = kScratchReg;
      __ LoadP(temp, src, r0);
      __ StoreP(temp, g.ToMemOperand(destination));
    }
  } else if (source->IsConstant()) {
    Constant src = g.ToConstant(source);
    if (destination->IsRegister() || destination->IsStackSlot()) {
      Register dst =
          destination->IsRegister() ? g.ToRegister(destination) : kScratchReg;
      switch (src.type()) {
        case Constant::kInt32:
#if V8_TARGET_ARCH_S390X
          if (false) {
#else
          if (RelocInfo::IsWasmReference(src.rmode())) {
#endif
            __ mov(dst, Operand(src.ToInt32(), src.rmode()));
          } else {
            __ Load(dst, Operand(src.ToInt32()));
          }
          break;
        case Constant::kInt64:
#if V8_TARGET_ARCH_S390X
          if (RelocInfo::IsWasmReference(src.rmode())) {
            __ mov(dst, Operand(src.ToInt64(), src.rmode()));
          } else {
            __ Load(dst, Operand(src.ToInt64()));
          }
#else
          __ mov(dst, Operand(src.ToInt64()));
#endif  // V8_TARGET_ARCH_S390X
          break;
        case Constant::kFloat32:
          __ mov(dst, Operand::EmbeddedNumber(src.ToFloat32()));
          break;
        case Constant::kFloat64:
          __ mov(dst, Operand::EmbeddedNumber(src.ToFloat64().value()));
          break;
        case Constant::kExternalReference:
          __ Move(dst, src.ToExternalReference());
          break;
        case Constant::kDelayedStringConstant:
          __ mov(dst, Operand::EmbeddedStringConstant(
                          src.ToDelayedStringConstant()));
          break;
        case Constant::kHeapObject: {
          Handle<HeapObject> src_object = src.ToHeapObject();
          RootIndex index;
          if (IsMaterializableFromRoot(src_object, &index)) {
            __ LoadRoot(dst, index);
          } else {
            __ Move(dst, src_object);
          }
          break;
        }
        case Constant::kRpoNumber:
          UNREACHABLE();  // Loading RPO constants is not supported on S390.
          break;
      }
      if (destination->IsStackSlot()) {
        __ StoreP(dst, g.ToMemOperand(destination), r0);
      }
    } else {
      DoubleRegister dst = destination->IsFPRegister()
                               ? g.ToDoubleRegister(destination)
                               : kScratchDoubleReg;
      double value = (src.type() == Constant::kFloat32)
                         ? src.ToFloat32()
                         : src.ToFloat64().value();
      if (src.type() == Constant::kFloat32) {
        __ LoadFloat32Literal(dst, src.ToFloat32(), kScratchReg);
      } else {
        __ LoadDoubleLiteral(dst, value, kScratchReg);
      }

      if (destination->IsFloatStackSlot()) {
        __ StoreFloat32(dst, g.ToMemOperand(destination));
      } else if (destination->IsDoubleStackSlot()) {
        __ StoreDouble(dst, g.ToMemOperand(destination));
      }
    }
  } else if (source->IsFPRegister()) {
    DoubleRegister src = g.ToDoubleRegister(source);
    if (destination->IsFPRegister()) {
      DoubleRegister dst = g.ToDoubleRegister(destination);
      __ Move(dst, src);
    } else {
      DCHECK(destination->IsFPStackSlot());
      LocationOperand* op = LocationOperand::cast(source);
      if (op->representation() == MachineRepresentation::kFloat64) {
        __ StoreDouble(src, g.ToMemOperand(destination));
      } else {
        __ StoreFloat32(src, g.ToMemOperand(destination));
      }
    }
  } else if (source->IsFPStackSlot()) {
    DCHECK(destination->IsFPRegister() || destination->IsFPStackSlot());
    MemOperand src = g.ToMemOperand(source);
    if (destination->IsFPRegister()) {
      LocationOperand* op = LocationOperand::cast(source);
      if (op->representation() == MachineRepresentation::kFloat64) {
        __ LoadDouble(g.ToDoubleRegister(destination), src);
      } else {
        __ LoadFloat32(g.ToDoubleRegister(destination), src);
      }
    } else {
      LocationOperand* op = LocationOperand::cast(source);
      DoubleRegister temp = kScratchDoubleReg;
      if (op->representation() == MachineRepresentation::kFloat64) {
        __ LoadDouble(temp, src);
        __ StoreDouble(temp, g.ToMemOperand(destination));
      } else {
        __ LoadFloat32(temp, src);
        __ StoreFloat32(temp, g.ToMemOperand(destination));
      }
    }
  } else {
    UNREACHABLE();
  }
}
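// Swaps go through kScratchReg / kScratchDoubleReg as temporaries (plus r0
// or d0 for the memory-to-memory cases), so no allocatable register is
// clobbered.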
void CodeGenerator::AssembleSwap(InstructionOperand* source,
                                 InstructionOperand* destination) {
  S390OperandConverter g(this, nullptr);
  if (source->IsRegister()) {
    Register src = g.ToRegister(source);
    if (destination->IsRegister()) {
      __ SwapP(src, g.ToRegister(destination), kScratchReg);
    } else {
      DCHECK(destination->IsStackSlot());
      __ SwapP(src, g.ToMemOperand(destination), kScratchReg);
    }
  } else if (source->IsStackSlot()) {
    DCHECK(destination->IsStackSlot());
    __ SwapP(g.ToMemOperand(source), g.ToMemOperand(destination), kScratchReg,
             r0);
  } else if (source->IsFloatRegister()) {
    DoubleRegister src = g.ToDoubleRegister(source);
    if (destination->IsFloatRegister()) {
      __ SwapFloat32(src, g.ToDoubleRegister(destination), kScratchDoubleReg);
    } else {
      DCHECK(destination->IsFloatStackSlot());
      __ SwapFloat32(src, g.ToMemOperand(destination), kScratchDoubleReg);
    }
  } else if (source->IsDoubleRegister()) {
    DoubleRegister src = g.ToDoubleRegister(source);
    if (destination->IsDoubleRegister()) {
      __ SwapDouble(src, g.ToDoubleRegister(destination), kScratchDoubleReg);
    } else {
      DCHECK(destination->IsDoubleStackSlot());
      __ SwapDouble(src, g.ToMemOperand(destination), kScratchDoubleReg);
    }
  } else if (source->IsFloatStackSlot()) {
    DCHECK(destination->IsFloatStackSlot());
    __ SwapFloat32(g.ToMemOperand(source), g.ToMemOperand(destination),
                   kScratchDoubleReg, d0);
  } else if (source->IsDoubleStackSlot()) {
    DCHECK(destination->IsDoubleStackSlot());
    __ SwapDouble(g.ToMemOperand(source), g.ToMemOperand(destination),
                  kScratchDoubleReg, d0);
  } else if (source->IsSimd128Register()) {
    UNREACHABLE();  // Simd128 is not supported on this port.
  } else {
    UNREACHABLE();
  }
}
void CodeGenerator::AssembleJumpTable(Label** targets, size_t target_count) {
  for (size_t index = 0; index < target_count; ++index) {
    __ emit_label_addr(targets[index]);
  }
}