#include "src/compiler/backend/code-generator.h"

#include "src/assembler-inl.h"
#include "src/callable.h"
#include "src/compiler/backend/code-generator-impl.h"
#include "src/compiler/backend/gap-resolver.h"
#include "src/compiler/node-matchers.h"
#include "src/compiler/osr.h"
#include "src/double.h"
#include "src/macro-assembler.h"
#include "src/optimized-compilation-info.h"
#include "src/wasm/wasm-code-manager.h"
#include "src/wasm/wasm-objects.h"

namespace v8 {
namespace internal {
namespace compiler {

#define __ tasm()->

#define kScratchReg r11

// Adds PPC-specific methods to convert InstructionOperands.
class PPCOperandConverter final : public InstructionOperandConverter {
 public:
  PPCOperandConverter(CodeGenerator* gen, Instruction* instr)
      : InstructionOperandConverter(gen, instr) {}

  size_t OutputCount() { return instr_->OutputCount(); }

  RCBit OutputRCBit() const {
    switch (instr_->flags_mode()) {
      case kFlags_branch:
      case kFlags_branch_and_poison:
      case kFlags_deoptimize:
      case kFlags_deoptimize_and_poison:
      case kFlags_set:
      case kFlags_trap:
        return SetRC;
      case kFlags_none:
        return LeaveRC;
    }
    UNREACHABLE();
  }

  bool CompareLogical() const {
    switch (instr_->flags_condition()) {
      case kUnsignedLessThan:
      case kUnsignedGreaterThanOrEqual:
      case kUnsignedLessThanOrEqual:
      case kUnsignedGreaterThan:
        return true;
      default:
        return false;
    }
  }

  Operand InputImmediate(size_t index) {
    Constant constant = ToConstant(instr_->InputAt(index));
    switch (constant.type()) {
      case Constant::kInt32:
        return Operand(constant.ToInt32());
      case Constant::kFloat32:
        return Operand::EmbeddedNumber(constant.ToFloat32());
      case Constant::kFloat64:
        return Operand::EmbeddedNumber(constant.ToFloat64().value());
      case Constant::kInt64:
#if V8_TARGET_ARCH_PPC64
        return Operand(constant.ToInt64());
#endif
      case Constant::kExternalReference:
        return Operand(constant.ToExternalReference());
      case Constant::kDelayedStringConstant:
        return Operand::EmbeddedStringConstant(
            constant.ToDelayedStringConstant());
      case Constant::kHeapObject:
      case Constant::kRpoNumber:
        break;
    }
    UNREACHABLE();
  }

  MemOperand MemoryOperand(AddressingMode* mode, size_t* first_index) {
    const size_t index = *first_index;
    *mode = AddressingModeField::decode(instr_->opcode());
    switch (*mode) {
      case kMode_None:
        break;
      case kMode_MRI:
        *first_index += 2;
        return MemOperand(InputRegister(index + 0), InputInt32(index + 1));
      case kMode_MRR:
        *first_index += 2;
        return MemOperand(InputRegister(index + 0), InputRegister(index + 1));
    }
    UNREACHABLE();
  }

  MemOperand MemoryOperand(AddressingMode* mode, size_t first_index = 0) {
    return MemoryOperand(mode, &first_index);
  }

  MemOperand ToMemOperand(InstructionOperand* op) const {
    DCHECK_NOT_NULL(op);
    DCHECK(op->IsStackSlot() || op->IsFPStackSlot());
    return SlotToMemOperand(AllocatedOperand::cast(op)->index());
  }

  MemOperand SlotToMemOperand(int slot) const {
    FrameOffset offset = frame_access_state()->GetFrameOffset(slot);
    return MemOperand(offset.from_stack_pointer() ? sp : fp, offset.offset());
  }
};
static inline bool HasRegisterInput(Instruction* instr, size_t index) {
  return instr->InputAt(index)->IsRegister();
}

namespace {
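// Deferred ("out-of-line") code for the generational write barrier: the fast
// path in AssembleArchInstruction performs the store and only branches here
// when the page flags say the GC must be notified. The slow path recomputes
// the slot address and calls the RecordWrite stub, saving/restoring the link
// register if no frame has been set up.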
class OutOfLineRecordWrite final : public OutOfLineCode {
 public:
  OutOfLineRecordWrite(CodeGenerator* gen, Register object, Register offset,
                       Register value, Register scratch0, Register scratch1,
                       RecordWriteMode mode, StubCallMode stub_mode)
      : OutOfLineCode(gen),
        object_(object),
        offset_(offset),
        offset_immediate_(0),
        value_(value),
        scratch0_(scratch0),
        scratch1_(scratch1),
        mode_(mode),
        stub_mode_(stub_mode),
        must_save_lr_(!gen->frame_access_state()->has_frame()),
        zone_(gen->zone()) {}

  OutOfLineRecordWrite(CodeGenerator* gen, Register object, int32_t offset,
                       Register value, Register scratch0, Register scratch1,
                       RecordWriteMode mode, StubCallMode stub_mode)
      : OutOfLineCode(gen),
        object_(object),
        offset_(no_reg),
        offset_immediate_(offset),
        value_(value),
        scratch0_(scratch0),
        scratch1_(scratch1),
        mode_(mode),
        stub_mode_(stub_mode),
        must_save_lr_(!gen->frame_access_state()->has_frame()),
        zone_(gen->zone()) {}

  void Generate() final {
    ConstantPoolUnavailableScope constant_pool_unavailable(tasm());
    if (mode_ > RecordWriteMode::kValueIsPointer) {
      __ JumpIfSmi(value_, exit());
    }
    __ CheckPageFlag(value_, scratch0_,
                     MemoryChunk::kPointersToHereAreInterestingMask, eq,
                     exit());
    if (offset_ == no_reg) {
      __ addi(scratch1_, object_, Operand(offset_immediate_));
    } else {
      DCHECK_EQ(0, offset_immediate_);
      __ add(scratch1_, object_, offset_);
    }
    RememberedSetAction const remembered_set_action =
        mode_ > RecordWriteMode::kValueIsMap ? EMIT_REMEMBERED_SET
                                             : OMIT_REMEMBERED_SET;
    SaveFPRegsMode const save_fp_mode =
        frame()->DidAllocateDoubleRegisters() ? kSaveFPRegs : kDontSaveFPRegs;
    if (must_save_lr_) {
      // We need to save and restore lr if the frame was elided.
      __ mflr(scratch0_);
      __ Push(scratch0_);
    }
    if (stub_mode_ == StubCallMode::kCallWasmRuntimeStub) {
      // A direct call to a wasm runtime stub defined in this module.
      __ CallRecordWriteStub(object_, scratch1_, remembered_set_action,
                             save_fp_mode, wasm::WasmCode::kWasmRecordWrite);
    } else {
      __ CallRecordWriteStub(object_, scratch1_, remembered_set_action,
                             save_fp_mode);
    }
    if (must_save_lr_) {
      __ Pop(scratch0_);
      __ mtlr(scratch0_);
    }
  }

 private:
  Register const object_;
  Register const offset_;
  int32_t const offset_immediate_;  // Valid if offset_ == no_reg.
  Register const value_;
  Register const scratch0_;
  Register const scratch1_;
  RecordWriteMode const mode_;
  StubCallMode stub_mode_;
  bool must_save_lr_;
  Zone* zone_;
};
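// Maps a platform-independent FlagsCondition onto a PPC condition-register
// condition. kOverflow/kNotOverflow are only supported for the add/sub
// opcodes listed in the nested switches; for those the preceding code
// sequence leaves the overflow indication in cr0, where it reads as lt
// (overflow) or ge (no overflow).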
Condition FlagsConditionToCondition(FlagsCondition condition, ArchOpcode op) {
  switch (condition) {
    case kEqual:
      return eq;
    case kNotEqual:
      return ne;
    case kSignedLessThan:
    case kUnsignedLessThan:
      return lt;
    case kSignedGreaterThanOrEqual:
    case kUnsignedGreaterThanOrEqual:
      return ge;
    case kSignedLessThanOrEqual:
    case kUnsignedLessThanOrEqual:
      return le;
    case kSignedGreaterThan:
    case kUnsignedGreaterThan:
      return gt;
    case kOverflow:
      switch (op) {
#if V8_TARGET_ARCH_PPC64
        case kPPC_Add32:
        case kPPC_Add64:
        case kPPC_Sub:
#endif
        case kPPC_AddWithOverflow32:
        case kPPC_SubWithOverflow32:
          return lt;
        default:
          break;
      }
      break;
    case kNotOverflow:
      switch (op) {
#if V8_TARGET_ARCH_PPC64
        case kPPC_Add32:
        case kPPC_Add64:
        case kPPC_Sub:
#endif
        case kPPC_AddWithOverflow32:
        case kPPC_SubWithOverflow32:
          return ge;
        default:
          break;
      }
      break;
    default:
      break;
  }
  UNREACHABLE();
}
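// Spectre mitigation: when an instruction is flagged as a poisoned memory
// access, mask the loaded value with kSpeculationPoisonRegister. The poison
// register is all-ones on the correctly predicted path and all-zeros on a
// mispredicted one (see GenerateSpeculationPoisonFromCodeStartRegister), so
// a speculatively loaded value is forced to zero.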
void EmitWordLoadPoisoningIfNeeded(CodeGenerator* codegen, Instruction* instr,
                                   PPCOperandConverter& i) {
  const MemoryAccessMode access_mode =
      static_cast<MemoryAccessMode>(MiscField::decode(instr->opcode()));
  if (access_mode == kMemoryAccessPoisoned) {
    Register value = i.OutputRegister();
    codegen->tasm()->and_(value, value, kSpeculationPoisonRegister);
  }
}

}  // namespace
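// Assembler helpers shared by AssembleArchInstruction below. The *_RC
// variants thread i.OutputRCBit() through to the emitted instruction so that
// a comparison fused into the instruction (flags_mode != kFlags_none) also
// sets CR0.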
#define ASSEMBLE_FLOAT_UNOP_RC(asm_instr, round)                     \
  do {                                                               \
    __ asm_instr(i.OutputDoubleRegister(), i.InputDoubleRegister(0), \
                 i.OutputRCBit());                                   \
    if (round) {                                                     \
      __ frsp(i.OutputDoubleRegister(), i.OutputDoubleRegister());   \
    }                                                                \
  } while (0)

#define ASSEMBLE_FLOAT_BINOP_RC(asm_instr, round)                    \
  do {                                                               \
    __ asm_instr(i.OutputDoubleRegister(), i.InputDoubleRegister(0), \
                 i.InputDoubleRegister(1), i.OutputRCBit());         \
    if (round) {                                                     \
      __ frsp(i.OutputDoubleRegister(), i.OutputDoubleRegister());   \
    }                                                                \
  } while (0)

#define ASSEMBLE_BINOP(asm_instr_reg, asm_instr_imm)           \
  do {                                                         \
    if (HasRegisterInput(instr, 1)) {                          \
      __ asm_instr_reg(i.OutputRegister(), i.InputRegister(0), \
                       i.InputRegister(1));                    \
    } else {                                                   \
      __ asm_instr_imm(i.OutputRegister(), i.InputRegister(0), \
                       i.InputImmediate(1));                   \
    }                                                          \
  } while (0)

#define ASSEMBLE_BINOP_RC(asm_instr_reg, asm_instr_imm)        \
  do {                                                         \
    if (HasRegisterInput(instr, 1)) {                          \
      __ asm_instr_reg(i.OutputRegister(), i.InputRegister(0), \
                       i.InputRegister(1), i.OutputRCBit());   \
    } else {                                                   \
      __ asm_instr_imm(i.OutputRegister(), i.InputRegister(0), \
                       i.InputImmediate(1), i.OutputRCBit());  \
    }                                                          \
  } while (0)

#define ASSEMBLE_BINOP_INT_RC(asm_instr_reg, asm_instr_imm)    \
  do {                                                         \
    if (HasRegisterInput(instr, 1)) {                          \
      __ asm_instr_reg(i.OutputRegister(), i.InputRegister(0), \
                       i.InputRegister(1), i.OutputRCBit());   \
    } else {                                                   \
      __ asm_instr_imm(i.OutputRegister(), i.InputRegister(0), \
                       i.InputInt32(1), i.OutputRCBit());      \
    }                                                          \
  } while (0)

#define ASSEMBLE_ADD_WITH_OVERFLOW()                                    \
  do {                                                                  \
    if (HasRegisterInput(instr, 1)) {                                   \
      __ AddAndCheckForOverflow(i.OutputRegister(), i.InputRegister(0), \
                                i.InputRegister(1), kScratchReg, r0);   \
    } else {                                                            \
      __ AddAndCheckForOverflow(i.OutputRegister(), i.InputRegister(0), \
                                i.InputInt32(1), kScratchReg, r0);      \
    }                                                                   \
  } while (0)

#define ASSEMBLE_SUB_WITH_OVERFLOW()                                    \
  do {                                                                  \
    if (HasRegisterInput(instr, 1)) {                                   \
      __ SubAndCheckForOverflow(i.OutputRegister(), i.InputRegister(0), \
                                i.InputRegister(1), kScratchReg, r0);   \
    } else {                                                            \
      __ AddAndCheckForOverflow(i.OutputRegister(), i.InputRegister(0), \
                                -i.InputInt32(1), kScratchReg, r0);     \
    }                                                                   \
  } while (0)

#if V8_TARGET_ARCH_PPC64
#define ASSEMBLE_ADD_WITH_OVERFLOW32()         \
  do {                                         \
    ASSEMBLE_ADD_WITH_OVERFLOW();              \
    __ extsw(kScratchReg, kScratchReg, SetRC); \
  } while (0)

#define ASSEMBLE_SUB_WITH_OVERFLOW32()         \
  do {                                         \
    ASSEMBLE_SUB_WITH_OVERFLOW();              \
    __ extsw(kScratchReg, kScratchReg, SetRC); \
  } while (0)
#else
#define ASSEMBLE_ADD_WITH_OVERFLOW32 ASSEMBLE_ADD_WITH_OVERFLOW
#define ASSEMBLE_SUB_WITH_OVERFLOW32 ASSEMBLE_SUB_WITH_OVERFLOW
#endif

#define ASSEMBLE_COMPARE(cmp_instr, cmpl_instr)                        \
  do {                                                                 \
    const CRegister cr = cr0;                                          \
    if (HasRegisterInput(instr, 1)) {                                  \
      if (i.CompareLogical()) {                                        \
        __ cmpl_instr(i.InputRegister(0), i.InputRegister(1), cr);     \
      } else {                                                         \
        __ cmp_instr(i.InputRegister(0), i.InputRegister(1), cr);      \
      }                                                                \
    } else {                                                           \
      if (i.CompareLogical()) {                                        \
        __ cmpl_instr##i(i.InputRegister(0), i.InputImmediate(1), cr); \
      } else {                                                         \
        __ cmp_instr##i(i.InputRegister(0), i.InputImmediate(1), cr);  \
      }                                                                \
    }                                                                  \
    DCHECK_EQ(SetRC, i.OutputRCBit());                                 \
  } while (0)

#define ASSEMBLE_FLOAT_COMPARE(cmp_instr)                                 \
  do {                                                                    \
    const CRegister cr = cr0;                                             \
    __ cmp_instr(i.InputDoubleRegister(0), i.InputDoubleRegister(1), cr); \
    DCHECK_EQ(SetRC, i.OutputRCBit());                                    \
  } while (0)

#define ASSEMBLE_MODULO(div_instr, mul_instr)                        \
  do {                                                               \
    const Register scratch = kScratchReg;                            \
    __ div_instr(scratch, i.InputRegister(0), i.InputRegister(1));   \
    __ mul_instr(scratch, scratch, i.InputRegister(1));              \
    __ sub(i.OutputRegister(), i.InputRegister(0), scratch, LeaveOE, \
           i.OutputRCBit());                                         \
  } while (0)

#define ASSEMBLE_FLOAT_MODULO()                                             \
  do {                                                                      \
    FrameScope scope(tasm(), StackFrame::MANUAL);                           \
    __ PrepareCallCFunction(0, 2, kScratchReg);                             \
    __ MovToFloatParameters(i.InputDoubleRegister(0),                       \
                            i.InputDoubleRegister(1));                      \
    __ CallCFunction(ExternalReference::mod_two_doubles_operation(), 0, 2); \
    __ MovFromFloatResult(i.OutputDoubleRegister());                        \
    DCHECK_EQ(LeaveRC, i.OutputRCBit());                                    \
  } while (0)

#define ASSEMBLE_IEEE754_UNOP(name)                                         \
  do {                                                                      \
    FrameScope scope(tasm(), StackFrame::MANUAL);                           \
    __ PrepareCallCFunction(0, 1, kScratchReg);                             \
    __ MovToFloatParameter(i.InputDoubleRegister(0));                       \
    __ CallCFunction(ExternalReference::ieee754_##name##_function(), 0, 1); \
    __ MovFromFloatResult(i.OutputDoubleRegister());                        \
    DCHECK_EQ(LeaveRC, i.OutputRCBit());                                    \
  } while (0)

#define ASSEMBLE_IEEE754_BINOP(name)                                        \
  do {                                                                      \
    FrameScope scope(tasm(), StackFrame::MANUAL);                           \
    __ PrepareCallCFunction(0, 2, kScratchReg);                             \
    __ MovToFloatParameters(i.InputDoubleRegister(0),                       \
                            i.InputDoubleRegister(1));                      \
    __ CallCFunction(ExternalReference::ieee754_##name##_function(), 0, 2); \
    __ MovFromFloatResult(i.OutputDoubleRegister());                        \
    DCHECK_EQ(LeaveRC, i.OutputRCBit());                                    \
  } while (0)

#define ASSEMBLE_FLOAT_MAX()                                           \
  do {                                                                 \
    DoubleRegister left_reg = i.InputDoubleRegister(0);                \
    DoubleRegister right_reg = i.InputDoubleRegister(1);               \
    DoubleRegister result_reg = i.OutputDoubleRegister();              \
    Label check_nan_left, check_zero, return_left, return_right, done; \
    __ fcmpu(left_reg, right_reg);                                     \
    __ bunordered(&check_nan_left);                                    \
    __ beq(&check_zero);                                               \
    __ bge(&return_left);                                              \
    __ b(&return_right);                                               \
                                                                       \
    __ bind(&check_zero);                                              \
    __ fcmpu(left_reg, kDoubleRegZero);                                \
    /* left == right != 0. */                                          \
    __ bne(&return_left);                                              \
    /* At this point, both left and right are either 0 or -0. */       \
    __ fadd(result_reg, left_reg, right_reg);                          \
    __ b(&done);                                                       \
                                                                       \
    __ bind(&check_nan_left);                                          \
    __ fcmpu(left_reg, left_reg);                                      \
    /* left == NaN. */                                                 \
    __ bunordered(&return_left);                                       \
    __ bind(&return_right);                                            \
    if (right_reg != result_reg) {                                     \
      __ fmr(result_reg, right_reg);                                   \
    }                                                                  \
    __ b(&done);                                                       \
                                                                       \
    __ bind(&return_left);                                             \
    if (left_reg != result_reg) {                                      \
      __ fmr(result_reg, left_reg);                                    \
    }                                                                  \
    __ bind(&done);                                                    \
  } while (0)

#define ASSEMBLE_FLOAT_MIN()                                           \
  do {                                                                 \
    DoubleRegister left_reg = i.InputDoubleRegister(0);                \
    DoubleRegister right_reg = i.InputDoubleRegister(1);               \
    DoubleRegister result_reg = i.OutputDoubleRegister();              \
    Label check_nan_left, check_zero, return_left, return_right, done; \
    __ fcmpu(left_reg, right_reg);                                     \
    __ bunordered(&check_nan_left);                                    \
    __ beq(&check_zero);                                               \
    __ ble(&return_left);                                              \
    __ b(&return_right);                                               \
                                                                       \
    __ bind(&check_zero);                                              \
    __ fcmpu(left_reg, kDoubleRegZero);                                \
    /* left == right != 0. */                                          \
    __ bne(&return_left);                                              \
    /* At this point, both left and right are either 0 or -0. Min is   \
       computed as -((-L) - R) so that -0 is propagated correctly. */  \
    __ fneg(kScratchDoubleReg, left_reg);                              \
    if (kScratchDoubleReg == right_reg) {                              \
      __ fadd(result_reg, kScratchDoubleReg, right_reg);               \
    } else {                                                           \
      __ fsub(result_reg, kScratchDoubleReg, right_reg);               \
    }                                                                  \
    __ fneg(result_reg, result_reg);                                   \
    __ b(&done);                                                       \
                                                                       \
    __ bind(&check_nan_left);                                          \
    __ fcmpu(left_reg, left_reg);                                      \
    /* left == NaN. */                                                 \
    __ bunordered(&return_left);                                       \
                                                                       \
    __ bind(&return_right);                                            \
    if (right_reg != result_reg) {                                     \
      __ fmr(result_reg, right_reg);                                   \
    }                                                                  \
    __ b(&done);                                                       \
                                                                       \
    __ bind(&return_left);                                             \
    if (left_reg != result_reg) {                                      \
      __ fmr(result_reg, left_reg);                                    \
    }                                                                  \
    __ bind(&done);                                                    \
  } while (0)

#define ASSEMBLE_LOAD_FLOAT(asm_instr, asm_instrx)    \
  do {                                                \
    DoubleRegister result = i.OutputDoubleRegister(); \
    AddressingMode mode = kMode_None;                 \
    MemOperand operand = i.MemoryOperand(&mode);      \
    bool is_atomic = i.InputInt32(2);                 \
    if (mode == kMode_MRI) {                          \
      __ asm_instr(result, operand);                  \
    } else {                                          \
      __ asm_instrx(result, operand);                 \
    }                                                 \
    if (is_atomic) __ lwsync();                       \
    DCHECK_EQ(LeaveRC, i.OutputRCBit());              \
  } while (0)

#define ASSEMBLE_LOAD_INTEGER(asm_instr, asm_instrx) \
  do {                                               \
    Register result = i.OutputRegister();            \
    AddressingMode mode = kMode_None;                \
    MemOperand operand = i.MemoryOperand(&mode);     \
    bool is_atomic = i.InputInt32(2);                \
    if (mode == kMode_MRI) {                         \
      __ asm_instr(result, operand);                 \
    } else {                                         \
      __ asm_instrx(result, operand);                \
    }                                                \
    if (is_atomic) __ lwsync();                      \
    DCHECK_EQ(LeaveRC, i.OutputRCBit());             \
  } while (0)

#define ASSEMBLE_STORE_FLOAT(asm_instr, asm_instrx)      \
  do {                                                   \
    size_t index = 0;                                    \
    AddressingMode mode = kMode_None;                    \
    MemOperand operand = i.MemoryOperand(&mode, &index); \
    DoubleRegister value = i.InputDoubleRegister(index); \
    bool is_atomic = i.InputInt32(3);                    \
    if (is_atomic) __ lwsync();                          \
    if (mode == kMode_MRI) {                             \
      __ asm_instr(value, operand);                      \
    } else {                                             \
      __ asm_instrx(value, operand);                     \
    }                                                    \
    if (is_atomic) __ sync();                            \
    DCHECK_EQ(LeaveRC, i.OutputRCBit());                 \
  } while (0)

#define ASSEMBLE_STORE_INTEGER(asm_instr, asm_instrx)    \
  do {                                                   \
    size_t index = 0;                                    \
    AddressingMode mode = kMode_None;                    \
    MemOperand operand = i.MemoryOperand(&mode, &index); \
    Register value = i.InputRegister(index);             \
    bool is_atomic = i.InputInt32(3);                    \
    if (is_atomic) __ lwsync();                          \
    if (mode == kMode_MRI) {                             \
      __ asm_instr(value, operand);                      \
    } else {                                             \
      __ asm_instrx(value, operand);                     \
    }                                                    \
    if (is_atomic) __ sync();                            \
    DCHECK_EQ(LeaveRC, i.OutputRCBit());                 \
  } while (0)

#if V8_TARGET_ARCH_PPC64
// Zero-extends the low 32 bits of a 64-bit register.
#define CleanUInt32(x) __ ClearLeftImm(x, x, Operand(32))
#else
#define CleanUInt32(x)
#endif

#define ASSEMBLE_ATOMIC_EXCHANGE_INTEGER(load_instr, store_instr)       \
  do {                                                                  \
    Label exchange;                                                     \
    __ lwsync();                                                        \
    __ bind(&exchange);                                                 \
    __ load_instr(i.OutputRegister(0),                                  \
                  MemOperand(i.InputRegister(0), i.InputRegister(1)));  \
    __ store_instr(i.InputRegister(2),                                  \
                   MemOperand(i.InputRegister(0), i.InputRegister(1))); \
    __ bne(&exchange, cr0);                                             \
    __ sync();                                                          \
  } while (0)

#define ASSEMBLE_ATOMIC_BINOP(bin_inst, load_inst, store_inst)               \
  do {                                                                       \
    MemOperand operand = MemOperand(i.InputRegister(0), i.InputRegister(1)); \
    Label binop;                                                             \
    __ lwsync();                                                             \
    __ bind(&binop);                                                         \
    __ load_inst(i.OutputRegister(), operand);                               \
    __ bin_inst(kScratchReg, i.OutputRegister(), i.InputRegister(2));        \
    __ store_inst(kScratchReg, operand);                                     \
    __ bne(&binop, cr0);                                                     \
    __ sync();                                                               \
  } while (0)

#define ASSEMBLE_ATOMIC_BINOP_SIGN_EXT(bin_inst, load_inst, store_inst,      \
                                       ext_instr)                            \
  do {                                                                       \
    MemOperand operand = MemOperand(i.InputRegister(0), i.InputRegister(1)); \
    Label binop;                                                             \
    __ lwsync();                                                             \
    __ bind(&binop);                                                         \
    __ load_inst(i.OutputRegister(), operand);                               \
    __ ext_instr(i.OutputRegister(), i.OutputRegister());                    \
    __ bin_inst(kScratchReg, i.OutputRegister(), i.InputRegister(2));        \
    __ store_inst(kScratchReg, operand);                                     \
    __ bne(&binop, cr0);                                                     \
    __ sync();                                                               \
  } while (0)

#define ASSEMBLE_ATOMIC_COMPARE_EXCHANGE(cmp_inst, load_inst, store_inst)    \
  do {                                                                       \
    MemOperand operand = MemOperand(i.InputRegister(0), i.InputRegister(1)); \
    Label loop;                                                              \
    Label exit;                                                              \
    __ lwsync();                                                             \
    __ bind(&loop);                                                          \
    __ load_inst(i.OutputRegister(), operand);                               \
    __ cmp_inst(i.OutputRegister(), i.InputRegister(2), cr0);                \
    __ bne(&exit, cr0);                                                      \
    __ store_inst(i.InputRegister(3), operand);                              \
    __ bne(&loop, cr0);                                                      \
    __ bind(&exit);                                                          \
    __ sync();                                                               \
  } while (0)

#define ASSEMBLE_ATOMIC_COMPARE_EXCHANGE_SIGN_EXT(cmp_inst, load_inst,       \
                                                  store_inst, ext_instr)     \
  do {                                                                       \
    MemOperand operand = MemOperand(i.InputRegister(0), i.InputRegister(1)); \
    Label loop;                                                              \
    Label exit;                                                              \
    __ lwsync();                                                             \
    __ bind(&loop);                                                          \
    __ load_inst(i.OutputRegister(), operand);                               \
    __ ext_instr(i.OutputRegister(), i.OutputRegister());                    \
    __ cmp_inst(i.OutputRegister(), i.InputRegister(2));                     \
    __ bne(&exit, cr0);                                                      \
    __ store_inst(i.InputRegister(3), operand);                              \
    __ bne(&loop, cr0);                                                      \
    __ bind(&exit);                                                          \
    __ sync();                                                               \
  } while (0)

void CodeGenerator::AssembleDeconstructFrame() {
  __ LeaveFrame(StackFrame::MANUAL);
}

void CodeGenerator::AssemblePrepareTailCall() {
  if (frame_access_state()->has_frame()) {
    __ RestoreFrameStateForTailCall();
  }
  frame_access_state()->SetFrameAccessToSP();
}
void CodeGenerator::AssemblePopArgumentsAdaptorFrame(Register args_reg,
                                                     Register scratch1,
                                                     Register scratch2,
                                                     Register scratch3) {
  DCHECK(!AreAliased(args_reg, scratch1, scratch2, scratch3));
  Label done;

  // Check if current frame is an arguments adaptor frame.
  __ LoadP(scratch1, MemOperand(fp, StandardFrameConstants::kContextOffset));
  __ cmpi(scratch1,
          Operand(StackFrame::TypeToMarker(StackFrame::ARGUMENTS_ADAPTOR)));
  __ bne(&done);

  // Load arguments count from current arguments adaptor frame (note, it
  // does not include receiver).
  Register caller_args_count_reg = scratch1;
  __ LoadP(caller_args_count_reg,
           MemOperand(fp, ArgumentsAdaptorFrameConstants::kLengthOffset));
  __ SmiUntag(caller_args_count_reg);

  ParameterCount callee_args_count(args_reg);
  __ PrepareForTailCall(callee_args_count, caller_args_count_reg, scratch2,
                        scratch3);
  __ bind(&done);
}

namespace {
void FlushPendingPushRegisters(TurboAssembler* tasm,
                               FrameAccessState* frame_access_state,
                               ZoneVector<Register>* pending_pushes) {
  switch (pending_pushes->size()) {
    case 0:
      break;
    case 1:
      tasm->Push((*pending_pushes)[0]);
      break;
    case 2:
      tasm->Push((*pending_pushes)[0], (*pending_pushes)[1]);
      break;
    case 3:
      tasm->Push((*pending_pushes)[0], (*pending_pushes)[1],
                 (*pending_pushes)[2]);
      break;
    default:
      UNREACHABLE();
  }
  frame_access_state->IncreaseSPDelta(pending_pushes->size());
  pending_pushes->clear();
}
void AdjustStackPointerForTailCall(
    TurboAssembler* tasm, FrameAccessState* state, int new_slot_above_sp,
    ZoneVector<Register>* pending_pushes = nullptr,
    bool allow_shrinkage = true) {
  int current_sp_offset = state->GetSPToFPSlotCount() +
                          StandardFrameConstants::kFixedSlotCountAboveFp;
  int stack_slot_delta = new_slot_above_sp - current_sp_offset;
  if (stack_slot_delta > 0) {
    if (pending_pushes != nullptr) {
      FlushPendingPushRegisters(tasm, state, pending_pushes);
    }
    tasm->Add(sp, sp, -stack_slot_delta * kPointerSize, r0);
    state->IncreaseSPDelta(stack_slot_delta);
  } else if (allow_shrinkage && stack_slot_delta < 0) {
    if (pending_pushes != nullptr) {
      FlushPendingPushRegisters(tasm, state, pending_pushes);
    }
    tasm->Add(sp, sp, -stack_slot_delta * kPointerSize, r0);
    state->IncreaseSPDelta(stack_slot_delta);
  }
}

}  // namespace
void CodeGenerator::AssembleTailCallBeforeGap(Instruction* instr,
                                              int first_unused_stack_slot) {
  ZoneVector<MoveOperands*> pushes(zone());
  GetPushCompatibleMoves(instr, kRegisterPush, &pushes);

  if (!pushes.empty() &&
      (LocationOperand::cast(pushes.back()->destination()).index() + 1 ==
       first_unused_stack_slot)) {
    PPCOperandConverter g(this, instr);
    ZoneVector<Register> pending_pushes(zone());
    for (auto move : pushes) {
      LocationOperand destination_location(
          LocationOperand::cast(move->destination()));
      InstructionOperand source(move->source());
      AdjustStackPointerForTailCall(
          tasm(), frame_access_state(),
          destination_location.index() - pending_pushes.size(),
          &pending_pushes);
      // Pushes of non-register data types are not supported.
      DCHECK(source.IsRegister());
      LocationOperand source_location(LocationOperand::cast(source));
      pending_pushes.push_back(source_location.GetRegister());
      if (pending_pushes.size() == 3) {
        FlushPendingPushRegisters(tasm(), frame_access_state(),
                                  &pending_pushes);
      }
      move->Eliminate();
    }
    FlushPendingPushRegisters(tasm(), frame_access_state(), &pending_pushes);
  }
  AdjustStackPointerForTailCall(tasm(), frame_access_state(),
                                first_unused_stack_slot, nullptr, false);
}
void CodeGenerator::AssembleTailCallAfterGap(Instruction* instr,
                                             int first_unused_stack_slot) {
  AdjustStackPointerForTailCall(tasm(), frame_access_state(),
                                first_unused_stack_slot);
}
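// Verifies that kJavaScriptCallCodeStartRegister actually holds the code
// start address by recomputing it and comparing. Assert() only emits code
// when FLAG_debug_code is enabled.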
void CodeGenerator::AssembleCodeStartRegisterCheck() {
  Register scratch = kScratchReg;
  __ ComputeCodeStartAddress(scratch);
  __ cmp(scratch, kJavaScriptCallCodeStartRegister);
  __ Assert(eq, AbortReason::kWrongFunctionCodeStart);
}
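// Self-deoptimization check: load the CodeDataContainer of the current code
// object, test Code::kMarkedForDeoptimizationBit, and tail-call the
// CompileLazyDeoptimizedCode builtin if the bit is set.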
void CodeGenerator::BailoutIfDeoptimized() {
  if (FLAG_debug_code) {
    // Check that {kJavaScriptCallCodeStartRegister} is correct.
    __ ComputeCodeStartAddress(ip);
    __ cmp(ip, kJavaScriptCallCodeStartRegister);
    __ Assert(eq, AbortReason::kWrongFunctionCodeStart);
  }

  int offset = Code::kCodeDataContainerOffset - Code::kHeaderSize;
  __ LoadP(r11, MemOperand(kJavaScriptCallCodeStartRegister, offset));
  __ LoadWordArith(
      r11, FieldMemOperand(r11, CodeDataContainer::kKindSpecificFlagsOffset));
  __ TestBit(r11, Code::kMarkedForDeoptimizationBit);
  DCHECK(!isolate()->ShouldLoadConstantsFromRootList());
  Handle<Code> code = isolate()->builtins()->builtin_handle(
      Builtins::kCompileLazyDeoptimizedCode);
  __ Jump(code, RelocInfo::CODE_TARGET, ne, cr0);
}
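// Computes the speculation poison mask: the cmp/li/notx sequence first builds
// an all-ones value; the isel then keeps it when the code start register
// matches the recomputed code start address (eq: normal execution) and
// selects zero otherwise (wrong-PC speculation).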
void CodeGenerator::GenerateSpeculationPoisonFromCodeStartRegister() {
  Register scratch = kScratchReg;

  __ ComputeCodeStartAddress(scratch);

  __ cmp(kJavaScriptCallCodeStartRegister, scratch);
  __ li(scratch, Operand::Zero());
  __ notx(kSpeculationPoisonRegister, scratch);
  __ isel(eq, kSpeculationPoisonRegister, kSpeculationPoisonRegister, scratch);
}
void CodeGenerator::AssembleRegisterArgumentPoisoning() {
  __ and_(kJSFunctionRegister, kJSFunctionRegister, kSpeculationPoisonRegister);
  __ and_(kContextRegister, kContextRegister, kSpeculationPoisonRegister);
  __ and_(sp, sp, kSpeculationPoisonRegister);
}
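// Assembles an instruction after register allocation, producing machine code.
// Dispatches on the architecture-independent kArch* opcodes first, then the
// PPC-specific kPPC_* opcodes.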
CodeGenerator::CodeGenResult CodeGenerator::AssembleArchInstruction(
    Instruction* instr) {
  PPCOperandConverter i(this, instr);
  ArchOpcode opcode = ArchOpcodeField::decode(instr->opcode());

  switch (opcode) {
    case kArchCallCodeObject: {
      v8::internal::Assembler::BlockTrampolinePoolScope block_trampoline_pool(
          tasm());
      if (HasRegisterInput(instr, 0)) {
        Register reg = i.InputRegister(0);
        DCHECK_IMPLIES(
            HasCallDescriptorFlag(instr, CallDescriptor::kFixedTargetRegister),
            reg == kJavaScriptCallCodeStartRegister);
        __ addi(reg, reg, Operand(Code::kHeaderSize - kHeapObjectTag));
        __ Call(reg);
      } else {
        __ Call(i.InputCode(0), RelocInfo::CODE_TARGET);
      }
      RecordCallPosition(instr);
      DCHECK_EQ(LeaveRC, i.OutputRCBit());
      frame_access_state()->ClearSPDelta();
      break;
    }
    case kArchCallWasmFunction: {
      // We must not share code targets for calls to builtins for wasm code,
      // as they might need to be patched individually.
      if (instr->InputAt(0)->IsImmediate()) {
        Constant constant = i.ToConstant(instr->InputAt(0));
#ifdef V8_TARGET_ARCH_PPC64
        Address wasm_code = static_cast<Address>(constant.ToInt64());
#else
        Address wasm_code = static_cast<Address>(constant.ToInt32());
#endif
        __ Call(wasm_code, constant.rmode());
      } else {
        __ Call(i.InputRegister(0));
      }
      RecordCallPosition(instr);
      DCHECK_EQ(LeaveRC, i.OutputRCBit());
      frame_access_state()->ClearSPDelta();
      break;
    }
    case kArchTailCallCodeObjectFromJSFunction:
    case kArchTailCallCodeObject: {
      if (opcode == kArchTailCallCodeObjectFromJSFunction) {
        AssemblePopArgumentsAdaptorFrame(kJavaScriptCallArgCountRegister,
                                         i.TempRegister(0), i.TempRegister(1),
                                         i.TempRegister(2));
      }
      if (HasRegisterInput(instr, 0)) {
        Register reg = i.InputRegister(0);
        DCHECK_IMPLIES(
            HasCallDescriptorFlag(instr, CallDescriptor::kFixedTargetRegister),
            reg == kJavaScriptCallCodeStartRegister);
        __ addi(reg, reg, Operand(Code::kHeaderSize - kHeapObjectTag));
        __ Jump(reg);
      } else {
        // We cannot use the constant pool to load the target since
        // we've already restored the caller's frame.
        ConstantPoolUnavailableScope constant_pool_unavailable(tasm());
        __ Jump(i.InputCode(0), RelocInfo::CODE_TARGET);
      }
      DCHECK_EQ(LeaveRC, i.OutputRCBit());
      frame_access_state()->ClearSPDelta();
      frame_access_state()->SetFrameAccessToDefault();
      break;
    }
    case kArchTailCallWasm: {
      // We must not share code targets for calls to builtins for wasm code,
      // as they might need to be patched individually.
      if (instr->InputAt(0)->IsImmediate()) {
        Constant constant = i.ToConstant(instr->InputAt(0));
#ifdef V8_TARGET_ARCH_PPC64
        Address wasm_code = static_cast<Address>(constant.ToInt64());
#else
        Address wasm_code = static_cast<Address>(constant.ToInt32());
#endif
        __ Jump(wasm_code, constant.rmode());
      } else {
        __ Jump(i.InputRegister(0));
      }
      DCHECK_EQ(LeaveRC, i.OutputRCBit());
      frame_access_state()->ClearSPDelta();
      frame_access_state()->SetFrameAccessToDefault();
      break;
    }
    case kArchTailCallAddress: {
      CHECK(!instr->InputAt(0)->IsImmediate());
      Register reg = i.InputRegister(0);
      DCHECK_IMPLIES(
          HasCallDescriptorFlag(instr, CallDescriptor::kFixedTargetRegister),
          reg == kJavaScriptCallCodeStartRegister);
      __ Jump(reg);
      frame_access_state()->ClearSPDelta();
      frame_access_state()->SetFrameAccessToDefault();
      break;
    }
    case kArchCallJSFunction: {
      Register func = i.InputRegister(0);
      if (FLAG_debug_code) {
        // Check the function's context matches the context argument.
        __ LoadP(kScratchReg,
                 FieldMemOperand(func, JSFunction::kContextOffset));
        __ cmp(cp, kScratchReg);
        __ Assert(eq, AbortReason::kWrongFunctionContext);
      }
      static_assert(kJavaScriptCallCodeStartRegister == r5, "ABI mismatch");
      __ LoadP(r5, FieldMemOperand(func, JSFunction::kCodeOffset));
      __ addi(r5, r5, Operand(Code::kHeaderSize - kHeapObjectTag));
      __ Call(r5);
      RecordCallPosition(instr);
      DCHECK_EQ(LeaveRC, i.OutputRCBit());
      frame_access_state()->ClearSPDelta();
      break;
    }
    case kArchPrepareCallCFunction: {
      int const num_parameters = MiscField::decode(instr->opcode());
      __ PrepareCallCFunction(num_parameters, kScratchReg);
      // Frame alignment requires using FP-relative frame addressing.
      frame_access_state()->SetFrameAccessToFP();
      break;
    }
    case kArchSaveCallerRegisters: {
      fp_mode_ =
          static_cast<SaveFPRegsMode>(MiscField::decode(instr->opcode()));
      DCHECK(fp_mode_ == kDontSaveFPRegs || fp_mode_ == kSaveFPRegs);
      // kReturnRegister0 should have been saved before entering the stub.
      int bytes = __ PushCallerSaved(fp_mode_, kReturnRegister0);
      DCHECK_EQ(0, bytes % kPointerSize);
      DCHECK_EQ(0, frame_access_state()->sp_delta());
      frame_access_state()->IncreaseSPDelta(bytes / kPointerSize);
      DCHECK(!caller_registers_saved_);
      caller_registers_saved_ = true;
      break;
    }
    case kArchRestoreCallerRegisters: {
      DCHECK(fp_mode_ ==
             static_cast<SaveFPRegsMode>(MiscField::decode(instr->opcode())));
      DCHECK(fp_mode_ == kDontSaveFPRegs || fp_mode_ == kSaveFPRegs);
      // Don't overwrite the returned value.
      int bytes = __ PopCallerSaved(fp_mode_, kReturnRegister0);
      frame_access_state()->IncreaseSPDelta(-(bytes / kPointerSize));
      DCHECK_EQ(0, frame_access_state()->sp_delta());
      DCHECK(caller_registers_saved_);
      caller_registers_saved_ = false;
      break;
    }
    case kArchPrepareTailCall:
      AssemblePrepareTailCall();
      break;
    case kArchComment:
#ifdef V8_TARGET_ARCH_PPC64
      __ RecordComment(reinterpret_cast<const char*>(i.InputInt64(0)));
#else
      __ RecordComment(reinterpret_cast<const char*>(i.InputInt32(0)));
#endif
      break;
    case kArchCallCFunction: {
      int const num_parameters = MiscField::decode(instr->opcode());
      if (instr->InputAt(0)->IsImmediate()) {
        ExternalReference ref = i.InputExternalReference(0);
        __ CallCFunction(ref, num_parameters);
      } else {
        Register func = i.InputRegister(0);
        __ CallCFunction(func, num_parameters);
      }
      frame_access_state()->SetFrameAccessToDefault();
      // Restore the SP delta that was in effect before the call.
      frame_access_state()->ClearSPDelta();
      if (caller_registers_saved_) {
        // Re-sync the SP delta introduced by kArchSaveCallerRegisters.
        int bytes =
            __ RequiredStackSizeForCallerSaved(fp_mode_, kReturnRegister0);
        frame_access_state()->IncreaseSPDelta(bytes / kPointerSize);
      }
      break;
    }
    case kArchJmp:
      AssembleArchJump(i.InputRpo(0));
      DCHECK_EQ(LeaveRC, i.OutputRCBit());
      break;
    case kArchBinarySearchSwitch:
      AssembleArchBinarySearchSwitch(instr);
      break;
    case kArchLookupSwitch:
      AssembleArchLookupSwitch(instr);
      DCHECK_EQ(LeaveRC, i.OutputRCBit());
      break;
    case kArchTableSwitch:
      AssembleArchTableSwitch(instr);
      DCHECK_EQ(LeaveRC, i.OutputRCBit());
      break;
    case kArchDebugAbort:
      DCHECK(i.InputRegister(0) == r4);
      if (!frame_access_state()->has_frame()) {
        // We don't actually want to generate a pile of code for this, so just
        // claim there is a stack frame, without generating one.
        FrameScope scope(tasm(), StackFrame::NONE);
        __ Call(isolate()->builtins()->builtin_handle(Builtins::kAbortJS),
                RelocInfo::CODE_TARGET);
      } else {
        __ Call(isolate()->builtins()->builtin_handle(Builtins::kAbortJS),
                RelocInfo::CODE_TARGET);
      }
      __ stop("kArchDebugAbort");
      break;
    case kArchDebugBreak:
      __ stop("kArchDebugBreak");
      break;
    case kArchNop:
    case kArchThrowTerminator:
      // Don't emit code for nops.
      DCHECK_EQ(LeaveRC, i.OutputRCBit());
      break;
    case kArchDeoptimize: {
      int deopt_state_id =
          BuildTranslation(instr, -1, 0, OutputFrameStateCombine::Ignore());
      CodeGenResult result =
          AssembleDeoptimizerCall(deopt_state_id, current_source_position_);
      if (result != kSuccess) return result;
      break;
    }
    case kArchRet:
      AssembleReturn(instr->InputAt(0));
      DCHECK_EQ(LeaveRC, i.OutputRCBit());
      break;
    case kArchStackPointer:
      __ mr(i.OutputRegister(), sp);
      DCHECK_EQ(LeaveRC, i.OutputRCBit());
      break;
    case kArchFramePointer:
      __ mr(i.OutputRegister(), fp);
      DCHECK_EQ(LeaveRC, i.OutputRCBit());
      break;
    case kArchParentFramePointer:
      if (frame_access_state()->has_frame()) {
        __ LoadP(i.OutputRegister(), MemOperand(fp, 0));
      } else {
        __ mr(i.OutputRegister(), fp);
      }
      break;
    case kArchTruncateDoubleToI:
      __ TruncateDoubleToI(isolate(), zone(), i.OutputRegister(),
                           i.InputDoubleRegister(0), DetermineStubCallMode());
      DCHECK_EQ(LeaveRC, i.OutputRCBit());
      break;
    case kArchStoreWithWriteBarrier: {
      RecordWriteMode mode =
          static_cast<RecordWriteMode>(MiscField::decode(instr->opcode()));
      Register object = i.InputRegister(0);
      Register value = i.InputRegister(2);
      Register scratch0 = i.TempRegister(0);
      Register scratch1 = i.TempRegister(1);
      OutOfLineRecordWrite* ool;

      AddressingMode addressing_mode =
          AddressingModeField::decode(instr->opcode());
      if (addressing_mode == kMode_MRI) {
        int32_t offset = i.InputInt32(1);
        ool = new (zone())
            OutOfLineRecordWrite(this, object, offset, value, scratch0,
                                 scratch1, mode, DetermineStubCallMode());
        __ StoreP(value, MemOperand(object, offset));
      } else {
        DCHECK_EQ(kMode_MRR, addressing_mode);
        Register offset(i.InputRegister(1));
        ool = new (zone())
            OutOfLineRecordWrite(this, object, offset, value, scratch0,
                                 scratch1, mode, DetermineStubCallMode());
        __ StorePX(value, MemOperand(object, offset));
      }
      __ CheckPageFlag(object, scratch0,
                       MemoryChunk::kPointersFromHereAreInterestingMask, ne,
                       ool->entry());
      __ bind(ool->exit());
      break;
    }
    case kArchStackSlot: {
      FrameOffset offset =
          frame_access_state()->GetFrameOffset(i.InputInt32(0));
      __ addi(i.OutputRegister(), offset.from_stack_pointer() ? sp : fp,
              Operand(offset.offset()));
      break;
    }
    case kArchWordPoisonOnSpeculation:
      __ and_(i.OutputRegister(), i.InputRegister(0),
              kSpeculationPoisonRegister);
      break;
    case kPPC_And:
      if (HasRegisterInput(instr, 1)) {
        __ and_(i.OutputRegister(), i.InputRegister(0), i.InputRegister(1),
                i.OutputRCBit());
      } else {
        __ andi(i.OutputRegister(), i.InputRegister(0), i.InputImmediate(1));
      }
      break;
    case kPPC_AndComplement:
      __ andc(i.OutputRegister(), i.InputRegister(0), i.InputRegister(1),
              i.OutputRCBit());
      break;
    case kPPC_Or:
      if (HasRegisterInput(instr, 1)) {
        __ orx(i.OutputRegister(), i.InputRegister(0), i.InputRegister(1),
               i.OutputRCBit());
      } else {
        __ ori(i.OutputRegister(), i.InputRegister(0), i.InputImmediate(1));
        DCHECK_EQ(LeaveRC, i.OutputRCBit());
      }
      break;
    case kPPC_OrComplement:
      __ orc(i.OutputRegister(), i.InputRegister(0), i.InputRegister(1),
             i.OutputRCBit());
      break;
    case kPPC_Xor:
      if (HasRegisterInput(instr, 1)) {
        __ xor_(i.OutputRegister(), i.InputRegister(0), i.InputRegister(1),
                i.OutputRCBit());
      } else {
        __ xori(i.OutputRegister(), i.InputRegister(0), i.InputImmediate(1));
        DCHECK_EQ(LeaveRC, i.OutputRCBit());
      }
      break;
    case kPPC_ShiftLeft32:
      ASSEMBLE_BINOP_RC(slw, slwi);
      break;
#if V8_TARGET_ARCH_PPC64
    case kPPC_ShiftLeft64:
      ASSEMBLE_BINOP_RC(sld, sldi);
      break;
#endif
    case kPPC_ShiftRight32:
      ASSEMBLE_BINOP_RC(srw, srwi);
      break;
#if V8_TARGET_ARCH_PPC64
    case kPPC_ShiftRight64:
      ASSEMBLE_BINOP_RC(srd, srdi);
      break;
#endif
    case kPPC_ShiftRightAlg32:
      ASSEMBLE_BINOP_INT_RC(sraw, srawi);
      break;
#if V8_TARGET_ARCH_PPC64
    case kPPC_ShiftRightAlg64:
      ASSEMBLE_BINOP_INT_RC(srad, sradi);
      break;
#endif
#if !V8_TARGET_ARCH_PPC64
    case kPPC_AddPair:
      // i.InputRegister(0) ... left low word.
      // i.InputRegister(1) ... left high word.
      // i.InputRegister(2) ... right low word.
      // i.InputRegister(3) ... right high word.
      __ addc(i.OutputRegister(0), i.InputRegister(0), i.InputRegister(2));
      __ adde(i.OutputRegister(1), i.InputRegister(1), i.InputRegister(3));
      DCHECK_EQ(LeaveRC, i.OutputRCBit());
      break;
    case kPPC_SubPair:
      // i.InputRegister(0) ... left low word.
      // i.InputRegister(1) ... left high word.
      // i.InputRegister(2) ... right low word.
      // i.InputRegister(3) ... right high word.
      __ subc(i.OutputRegister(0), i.InputRegister(0), i.InputRegister(2));
      __ sube(i.OutputRegister(1), i.InputRegister(1), i.InputRegister(3));
      DCHECK_EQ(LeaveRC, i.OutputRCBit());
      break;
    case kPPC_MulPair:
      // i.InputRegister(0) ... left low word.
      // i.InputRegister(1) ... left high word.
      // i.InputRegister(2) ... right low word.
      // i.InputRegister(3) ... right high word.
      __ mullw(i.TempRegister(0), i.InputRegister(0), i.InputRegister(3));
      __ mullw(i.TempRegister(1), i.InputRegister(2), i.InputRegister(1));
      __ add(i.TempRegister(0), i.TempRegister(0), i.TempRegister(1));
      __ mullw(i.OutputRegister(0), i.InputRegister(0), i.InputRegister(2));
      __ mulhwu(i.OutputRegister(1), i.InputRegister(0), i.InputRegister(2));
      __ add(i.OutputRegister(1), i.OutputRegister(1), i.TempRegister(0));
      break;
    case kPPC_ShiftLeftPair: {
      Register second_output =
          instr->OutputCount() >= 2 ? i.OutputRegister(1) : i.TempRegister(0);
      if (instr->InputAt(2)->IsImmediate()) {
        __ ShiftLeftPair(i.OutputRegister(0), second_output,
                         i.InputRegister(0), i.InputRegister(1),
                         i.InputInt32(2));
      } else {
        __ ShiftLeftPair(i.OutputRegister(0), second_output,
                         i.InputRegister(0), i.InputRegister(1), kScratchReg,
                         i.InputRegister(2));
      }
      break;
    }
    case kPPC_ShiftRightPair: {
      Register second_output =
          instr->OutputCount() >= 2 ? i.OutputRegister(1) : i.TempRegister(0);
      if (instr->InputAt(2)->IsImmediate()) {
        __ ShiftRightPair(i.OutputRegister(0), second_output,
                          i.InputRegister(0), i.InputRegister(1),
                          i.InputInt32(2));
      } else {
        __ ShiftRightPair(i.OutputRegister(0), second_output,
                          i.InputRegister(0), i.InputRegister(1), kScratchReg,
                          i.InputRegister(2));
      }
      break;
    }
    case kPPC_ShiftRightAlgPair: {
      Register second_output =
          instr->OutputCount() >= 2 ? i.OutputRegister(1) : i.TempRegister(0);
      if (instr->InputAt(2)->IsImmediate()) {
        __ ShiftRightAlgPair(i.OutputRegister(0), second_output,
                             i.InputRegister(0), i.InputRegister(1),
                             i.InputInt32(2));
      } else {
        __ ShiftRightAlgPair(i.OutputRegister(0), second_output,
                             i.InputRegister(0), i.InputRegister(1),
                             kScratchReg, i.InputRegister(2));
      }
      break;
    }
#endif
    case kPPC_RotRight32:
      if (HasRegisterInput(instr, 1)) {
        __ subfic(kScratchReg, i.InputRegister(1), Operand(32));
        __ rotlw(i.OutputRegister(), i.InputRegister(0), kScratchReg,
                 i.OutputRCBit());
      } else {
        int sh = i.InputInt32(1);
        __ rotrwi(i.OutputRegister(), i.InputRegister(0), sh, i.OutputRCBit());
      }
      break;
#if V8_TARGET_ARCH_PPC64
    case kPPC_RotRight64:
      if (HasRegisterInput(instr, 1)) {
        __ subfic(kScratchReg, i.InputRegister(1), Operand(64));
        __ rotld(i.OutputRegister(), i.InputRegister(0), kScratchReg,
                 i.OutputRCBit());
      } else {
        int sh = i.InputInt32(1);
        __ rotrdi(i.OutputRegister(), i.InputRegister(0), sh, i.OutputRCBit());
      }
      break;
#endif
    case kPPC_Not:
      __ notx(i.OutputRegister(), i.InputRegister(0), i.OutputRCBit());
      break;
    case kPPC_RotLeftAndMask32:
      __ rlwinm(i.OutputRegister(), i.InputRegister(0), i.InputInt32(1),
                31 - i.InputInt32(2), 31 - i.InputInt32(3), i.OutputRCBit());
      break;
#if V8_TARGET_ARCH_PPC64
    case kPPC_RotLeftAndClear64:
      __ rldic(i.OutputRegister(), i.InputRegister(0), i.InputInt32(1),
               63 - i.InputInt32(2), i.OutputRCBit());
      break;
    case kPPC_RotLeftAndClearLeft64:
      __ rldicl(i.OutputRegister(), i.InputRegister(0), i.InputInt32(1),
                63 - i.InputInt32(2), i.OutputRCBit());
      break;
    case kPPC_RotLeftAndClearRight64:
      __ rldicr(i.OutputRegister(), i.InputRegister(0), i.InputInt32(1),
                63 - i.InputInt32(2), i.OutputRCBit());
      break;
#endif
    case kPPC_Add32:
#if V8_TARGET_ARCH_PPC64
      if (FlagsModeField::decode(instr->opcode()) != kFlags_none) {
        ASSEMBLE_ADD_WITH_OVERFLOW();
      } else {
#endif
        if (HasRegisterInput(instr, 1)) {
          __ add(i.OutputRegister(), i.InputRegister(0), i.InputRegister(1),
                 LeaveOE, i.OutputRCBit());
        } else {
          __ addi(i.OutputRegister(), i.InputRegister(0), i.InputImmediate(1));
          DCHECK_EQ(LeaveRC, i.OutputRCBit());
        }
        __ extsw(i.OutputRegister(), i.OutputRegister());
#if V8_TARGET_ARCH_PPC64
      }
#endif
      break;
#if V8_TARGET_ARCH_PPC64
    case kPPC_Add64:
      if (FlagsModeField::decode(instr->opcode()) != kFlags_none) {
        ASSEMBLE_ADD_WITH_OVERFLOW();
      } else {
        if (HasRegisterInput(instr, 1)) {
          __ add(i.OutputRegister(), i.InputRegister(0), i.InputRegister(1),
                 LeaveOE, i.OutputRCBit());
        } else {
          __ addi(i.OutputRegister(), i.InputRegister(0), i.InputImmediate(1));
          DCHECK_EQ(LeaveRC, i.OutputRCBit());
        }
      }
      break;
#endif
    case kPPC_AddWithOverflow32:
      ASSEMBLE_ADD_WITH_OVERFLOW32();
      break;
    case kPPC_AddDouble:
      ASSEMBLE_FLOAT_BINOP_RC(fadd, MiscField::decode(instr->opcode()));
      break;
    case kPPC_Sub:
#if V8_TARGET_ARCH_PPC64
      if (FlagsModeField::decode(instr->opcode()) != kFlags_none) {
        ASSEMBLE_SUB_WITH_OVERFLOW();
      } else {
#endif
        if (HasRegisterInput(instr, 1)) {
          __ sub(i.OutputRegister(), i.InputRegister(0), i.InputRegister(1),
                 LeaveOE, i.OutputRCBit());
        } else {
          if (is_int16(i.InputImmediate(1).immediate())) {
            __ subi(i.OutputRegister(), i.InputRegister(0),
                    i.InputImmediate(1));
            DCHECK_EQ(LeaveRC, i.OutputRCBit());
          } else {
            __ mov(kScratchReg, i.InputImmediate(1));
            __ sub(i.OutputRegister(), i.InputRegister(0), kScratchReg, LeaveOE,
                   i.OutputRCBit());
          }
        }
#if V8_TARGET_ARCH_PPC64
      }
#endif
      break;
    case kPPC_SubWithOverflow32:
      ASSEMBLE_SUB_WITH_OVERFLOW32();
      break;
    case kPPC_SubDouble:
      ASSEMBLE_FLOAT_BINOP_RC(fsub, MiscField::decode(instr->opcode()));
      break;
    case kPPC_Mul32:
      __ mullw(i.OutputRegister(), i.InputRegister(0), i.InputRegister(1),
               LeaveOE, i.OutputRCBit());
      break;
#if V8_TARGET_ARCH_PPC64
    case kPPC_Mul64:
      __ mulld(i.OutputRegister(), i.InputRegister(0), i.InputRegister(1),
               LeaveOE, i.OutputRCBit());
      break;
#endif
    case kPPC_Mul32WithHigh32:
      if (i.OutputRegister(0) == i.InputRegister(0) ||
          i.OutputRegister(0) == i.InputRegister(1) ||
          i.OutputRegister(1) == i.InputRegister(0) ||
          i.OutputRegister(1) == i.InputRegister(1)) {
        __ mullw(kScratchReg, i.InputRegister(0), i.InputRegister(1));
        __ mulhw(i.OutputRegister(1), i.InputRegister(0),
                 i.InputRegister(1));
        __ mr(i.OutputRegister(0), kScratchReg);
      } else {
        __ mullw(i.OutputRegister(0), i.InputRegister(0),
                 i.InputRegister(1));
        __ mulhw(i.OutputRegister(1), i.InputRegister(0),
                 i.InputRegister(1));
      }
      break;
    case kPPC_MulHigh32:
      __ mulhw(i.OutputRegister(), i.InputRegister(0), i.InputRegister(1),
               i.OutputRCBit());
      break;
    case kPPC_MulHighU32:
      __ mulhwu(i.OutputRegister(), i.InputRegister(0), i.InputRegister(1),
                i.OutputRCBit());
      break;
    case kPPC_MulDouble:
      ASSEMBLE_FLOAT_BINOP_RC(fmul, MiscField::decode(instr->opcode()));
      break;
    case kPPC_Div32:
      __ divw(i.OutputRegister(), i.InputRegister(0), i.InputRegister(1));
      DCHECK_EQ(LeaveRC, i.OutputRCBit());
      break;
#if V8_TARGET_ARCH_PPC64
    case kPPC_Div64:
      __ divd(i.OutputRegister(), i.InputRegister(0), i.InputRegister(1));
      DCHECK_EQ(LeaveRC, i.OutputRCBit());
      break;
#endif
    case kPPC_DivU32:
      __ divwu(i.OutputRegister(), i.InputRegister(0), i.InputRegister(1));
      DCHECK_EQ(LeaveRC, i.OutputRCBit());
      break;
#if V8_TARGET_ARCH_PPC64
    case kPPC_DivU64:
      __ divdu(i.OutputRegister(), i.InputRegister(0), i.InputRegister(1));
      DCHECK_EQ(LeaveRC, i.OutputRCBit());
      break;
#endif
    case kPPC_DivDouble:
      ASSEMBLE_FLOAT_BINOP_RC(fdiv, MiscField::decode(instr->opcode()));
      break;
    case kPPC_Mod32:
      if (CpuFeatures::IsSupported(MODULO)) {
        __ modsw(i.OutputRegister(), i.InputRegister(0), i.InputRegister(1));
      } else {
        ASSEMBLE_MODULO(divw, mullw);
      }
      break;
#if V8_TARGET_ARCH_PPC64
    case kPPC_Mod64:
      if (CpuFeatures::IsSupported(MODULO)) {
        __ modsd(i.OutputRegister(), i.InputRegister(0), i.InputRegister(1));
      } else {
        ASSEMBLE_MODULO(divd, mulld);
      }
      break;
#endif
    case kPPC_ModU32:
      if (CpuFeatures::IsSupported(MODULO)) {
        __ moduw(i.OutputRegister(), i.InputRegister(0), i.InputRegister(1));
      } else {
        ASSEMBLE_MODULO(divwu, mullw);
      }
      break;
#if V8_TARGET_ARCH_PPC64
    case kPPC_ModU64:
      if (CpuFeatures::IsSupported(MODULO)) {
        __ modud(i.OutputRegister(), i.InputRegister(0), i.InputRegister(1));
      } else {
        ASSEMBLE_MODULO(divdu, mulld);
      }
      break;
#endif
    case kPPC_ModDouble:
      ASSEMBLE_FLOAT_MODULO();
      break;
    case kIeee754Float64Acos:
      ASSEMBLE_IEEE754_UNOP(acos);
      break;
    case kIeee754Float64Acosh:
      ASSEMBLE_IEEE754_UNOP(acosh);
      break;
    case kIeee754Float64Asin:
      ASSEMBLE_IEEE754_UNOP(asin);
      break;
    case kIeee754Float64Asinh:
      ASSEMBLE_IEEE754_UNOP(asinh);
      break;
    case kIeee754Float64Atan:
      ASSEMBLE_IEEE754_UNOP(atan);
      break;
    case kIeee754Float64Atan2:
      ASSEMBLE_IEEE754_BINOP(atan2);
      break;
    case kIeee754Float64Atanh:
      ASSEMBLE_IEEE754_UNOP(atanh);
      break;
    case kIeee754Float64Tan:
      ASSEMBLE_IEEE754_UNOP(tan);
      break;
    case kIeee754Float64Tanh:
      ASSEMBLE_IEEE754_UNOP(tanh);
      break;
    case kIeee754Float64Cbrt:
      ASSEMBLE_IEEE754_UNOP(cbrt);
      break;
    case kIeee754Float64Sin:
      ASSEMBLE_IEEE754_UNOP(sin);
      break;
    case kIeee754Float64Sinh:
      ASSEMBLE_IEEE754_UNOP(sinh);
      break;
    case kIeee754Float64Cos:
      ASSEMBLE_IEEE754_UNOP(cos);
      break;
    case kIeee754Float64Cosh:
      ASSEMBLE_IEEE754_UNOP(cosh);
      break;
    case kIeee754Float64Exp:
      ASSEMBLE_IEEE754_UNOP(exp);
      break;
    case kIeee754Float64Expm1:
      ASSEMBLE_IEEE754_UNOP(expm1);
      break;
    case kIeee754Float64Log:
      ASSEMBLE_IEEE754_UNOP(log);
      break;
    case kIeee754Float64Log1p:
      ASSEMBLE_IEEE754_UNOP(log1p);
      break;
    case kIeee754Float64Log2:
      ASSEMBLE_IEEE754_UNOP(log2);
      break;
    case kIeee754Float64Log10:
      ASSEMBLE_IEEE754_UNOP(log10);
      break;
    case kIeee754Float64Pow: {
      __ Call(BUILTIN_CODE(isolate(), MathPowInternal), RelocInfo::CODE_TARGET);
      __ Move(d1, d3);
      break;
    }
    case kPPC_Neg:
      __ neg(i.OutputRegister(), i.InputRegister(0), LeaveOE, i.OutputRCBit());
      break;
    case kPPC_MaxDouble:
      ASSEMBLE_FLOAT_MAX();
      break;
    case kPPC_MinDouble:
      ASSEMBLE_FLOAT_MIN();
      break;
    case kPPC_AbsDouble:
      ASSEMBLE_FLOAT_UNOP_RC(fabs, 0);
      break;
    case kPPC_SqrtDouble:
      ASSEMBLE_FLOAT_UNOP_RC(fsqrt, MiscField::decode(instr->opcode()));
      break;
    case kPPC_FloorDouble:
      ASSEMBLE_FLOAT_UNOP_RC(frim, MiscField::decode(instr->opcode()));
      break;
    case kPPC_CeilDouble:
      ASSEMBLE_FLOAT_UNOP_RC(frip, MiscField::decode(instr->opcode()));
      break;
    case kPPC_TruncateDouble:
      ASSEMBLE_FLOAT_UNOP_RC(friz, MiscField::decode(instr->opcode()));
      break;
    case kPPC_RoundDouble:
      ASSEMBLE_FLOAT_UNOP_RC(frin, MiscField::decode(instr->opcode()));
      break;
    case kPPC_NegDouble:
      ASSEMBLE_FLOAT_UNOP_RC(fneg, 0);
      break;
    case kPPC_Cntlz32:
      __ cntlzw(i.OutputRegister(), i.InputRegister(0));
      DCHECK_EQ(LeaveRC, i.OutputRCBit());
      break;
#if V8_TARGET_ARCH_PPC64
    case kPPC_Cntlz64:
      __ cntlzd(i.OutputRegister(), i.InputRegister(0));
      DCHECK_EQ(LeaveRC, i.OutputRCBit());
      break;
#endif
    case kPPC_Popcnt32:
      __ popcntw(i.OutputRegister(), i.InputRegister(0));
      DCHECK_EQ(LeaveRC, i.OutputRCBit());
      break;
#if V8_TARGET_ARCH_PPC64
    case kPPC_Popcnt64:
      __ popcntd(i.OutputRegister(), i.InputRegister(0));
      DCHECK_EQ(LeaveRC, i.OutputRCBit());
      break;
#endif
    case kPPC_Cmp32:
      ASSEMBLE_COMPARE(cmpw, cmplw);
      break;
#if V8_TARGET_ARCH_PPC64
    case kPPC_Cmp64:
      ASSEMBLE_COMPARE(cmp, cmpl);
      break;
#endif
    case kPPC_CmpDouble:
      ASSEMBLE_FLOAT_COMPARE(fcmpu);
      break;
    case kPPC_Tst32:
      if (HasRegisterInput(instr, 1)) {
        __ and_(r0, i.InputRegister(0), i.InputRegister(1), i.OutputRCBit());
      } else {
        __ andi(r0, i.InputRegister(0), i.InputImmediate(1));
      }
#if V8_TARGET_ARCH_PPC64
      __ extsw(r0, r0, i.OutputRCBit());
#endif
      DCHECK_EQ(SetRC, i.OutputRCBit());
      break;
#if V8_TARGET_ARCH_PPC64
    case kPPC_Tst64:
      if (HasRegisterInput(instr, 1)) {
        __ and_(r0, i.InputRegister(0), i.InputRegister(1), i.OutputRCBit());
      } else {
        __ andi(r0, i.InputRegister(0), i.InputImmediate(1));
      }
      DCHECK_EQ(SetRC, i.OutputRCBit());
      break;
#endif
    case kPPC_Float64SilenceNaN: {
      DoubleRegister value = i.InputDoubleRegister(0);
      DoubleRegister result = i.OutputDoubleRegister();
      __ CanonicalizeNaN(result, value);
      break;
    }
    case kPPC_Push:
      if (instr->InputAt(0)->IsFPRegister()) {
        LocationOperand* op = LocationOperand::cast(instr->InputAt(0));
        if (op->representation() == MachineRepresentation::kFloat64) {
          __ StoreDoubleU(i.InputDoubleRegister(0),
                          MemOperand(sp, -kDoubleSize), r0);
          frame_access_state()->IncreaseSPDelta(kDoubleSize / kPointerSize);
        } else {
          DCHECK_EQ(MachineRepresentation::kFloat32, op->representation());
          __ StoreSingleU(i.InputDoubleRegister(0),
                          MemOperand(sp, -kPointerSize), r0);
          frame_access_state()->IncreaseSPDelta(1);
        }
      } else {
        __ StorePU(i.InputRegister(0), MemOperand(sp, -kPointerSize), r0);
        frame_access_state()->IncreaseSPDelta(1);
      }
      DCHECK_EQ(LeaveRC, i.OutputRCBit());
      break;
    case kPPC_PushFrame: {
      int num_slots = i.InputInt32(1);
      if (instr->InputAt(0)->IsFPRegister()) {
        LocationOperand* op = LocationOperand::cast(instr->InputAt(0));
        if (op->representation() == MachineRepresentation::kFloat64) {
          __ StoreDoubleU(i.InputDoubleRegister(0),
                          MemOperand(sp, -num_slots * kPointerSize), r0);
        } else {
          DCHECK_EQ(MachineRepresentation::kFloat32, op->representation());
          __ StoreSingleU(i.InputDoubleRegister(0),
                          MemOperand(sp, -num_slots * kPointerSize), r0);
        }
      } else {
        __ StorePU(i.InputRegister(0),
                   MemOperand(sp, -num_slots * kPointerSize), r0);
      }
      break;
    }
    case kPPC_StoreToStackSlot: {
      int slot = i.InputInt32(1);
      if (instr->InputAt(0)->IsFPRegister()) {
        LocationOperand* op = LocationOperand::cast(instr->InputAt(0));
        if (op->representation() == MachineRepresentation::kFloat64) {
          __ StoreDouble(i.InputDoubleRegister(0),
                         MemOperand(sp, slot * kPointerSize), r0);
        } else {
          DCHECK_EQ(MachineRepresentation::kFloat32, op->representation());
          __ StoreSingle(i.InputDoubleRegister(0),
                         MemOperand(sp, slot * kPointerSize), r0);
        }
      } else {
        __ StoreP(i.InputRegister(0), MemOperand(sp, slot * kPointerSize), r0);
      }
      break;
    }
    case kPPC_ExtendSignWord8:
      __ extsb(i.OutputRegister(), i.InputRegister(0));
      DCHECK_EQ(LeaveRC, i.OutputRCBit());
      break;
    case kPPC_ExtendSignWord16:
      __ extsh(i.OutputRegister(), i.InputRegister(0));
      DCHECK_EQ(LeaveRC, i.OutputRCBit());
      break;
#if V8_TARGET_ARCH_PPC64
    case kPPC_ExtendSignWord32:
      __ extsw(i.OutputRegister(), i.InputRegister(0));
      DCHECK_EQ(LeaveRC, i.OutputRCBit());
      break;
    case kPPC_Uint32ToUint64:
      // Zero extend
      __ clrldi(i.OutputRegister(), i.InputRegister(0), Operand(32));
      DCHECK_EQ(LeaveRC, i.OutputRCBit());
      break;
    case kPPC_Int64ToInt32:
      __ extsw(i.OutputRegister(), i.InputRegister(0));
      DCHECK_EQ(LeaveRC, i.OutputRCBit());
      break;
    case kPPC_Int64ToFloat32:
      __ ConvertInt64ToFloat(i.InputRegister(0), i.OutputDoubleRegister());
      DCHECK_EQ(LeaveRC, i.OutputRCBit());
      break;
    case kPPC_Int64ToDouble:
      __ ConvertInt64ToDouble(i.InputRegister(0), i.OutputDoubleRegister());
      DCHECK_EQ(LeaveRC, i.OutputRCBit());
      break;
    case kPPC_Uint64ToFloat32:
      __ ConvertUnsignedInt64ToFloat(i.InputRegister(0),
                                     i.OutputDoubleRegister());
      DCHECK_EQ(LeaveRC, i.OutputRCBit());
      break;
    case kPPC_Uint64ToDouble:
      __ ConvertUnsignedInt64ToDouble(i.InputRegister(0),
                                      i.OutputDoubleRegister());
      DCHECK_EQ(LeaveRC, i.OutputRCBit());
      break;
#endif
    case kPPC_Int32ToFloat32:
      __ ConvertIntToFloat(i.InputRegister(0), i.OutputDoubleRegister());
      DCHECK_EQ(LeaveRC, i.OutputRCBit());
      break;
    case kPPC_Int32ToDouble:
      __ ConvertIntToDouble(i.InputRegister(0), i.OutputDoubleRegister());
      DCHECK_EQ(LeaveRC, i.OutputRCBit());
      break;
    case kPPC_Uint32ToFloat32:
      __ ConvertUnsignedIntToFloat(i.InputRegister(0),
                                   i.OutputDoubleRegister());
      DCHECK_EQ(LeaveRC, i.OutputRCBit());
      break;
    case kPPC_Uint32ToDouble:
      __ ConvertUnsignedIntToDouble(i.InputRegister(0),
                                    i.OutputDoubleRegister());
      DCHECK_EQ(LeaveRC, i.OutputRCBit());
      break;
    case kPPC_DoubleToInt32:
    case kPPC_DoubleToUint32:
    case kPPC_DoubleToInt64: {
#if V8_TARGET_ARCH_PPC64
      bool check_conversion =
          (opcode == kPPC_DoubleToInt64 && i.OutputCount() > 1);
      if (check_conversion) {
        __ mtfsb0(VXCVI);  // clear FPSCR:VXCVI bit
      }
#endif
      __ ConvertDoubleToInt64(i.InputDoubleRegister(0),
#if !V8_TARGET_ARCH_PPC64
                              kScratchReg,
#endif
                              i.OutputRegister(0), kScratchDoubleReg);
#if V8_TARGET_ARCH_PPC64
      if (check_conversion) {
        // Set 2nd output to zero if conversion fails.
        CRegister cr = cr7;
        int crbit = v8::internal::Assembler::encode_crbit(
            cr, static_cast<CRBit>(VXCVI % CRWIDTH));
        __ mcrfs(cr, VXCVI);  // extract FPSCR field containing VXCVI into cr7
        if (CpuFeatures::IsSupported(ISELECT)) {
          __ li(i.OutputRegister(1), Operand(1));
          __ isel(i.OutputRegister(1), r0, i.OutputRegister(1), crbit);
        } else {
          __ li(i.OutputRegister(1), Operand::Zero());
          __ bc(v8::internal::kInstrSize * 2, BT, crbit);
          __ li(i.OutputRegister(1), Operand(1));
        }
      }
#endif
      DCHECK_EQ(LeaveRC, i.OutputRCBit());
      break;
    }
#if V8_TARGET_ARCH_PPC64
    case kPPC_DoubleToUint64: {
      bool check_conversion = (i.OutputCount() > 1);
      if (check_conversion) {
        __ mtfsb0(VXCVI);  // clear FPSCR:VXCVI bit
      }
      __ ConvertDoubleToUnsignedInt64(i.InputDoubleRegister(0),
                                      i.OutputRegister(0), kScratchDoubleReg);
      if (check_conversion) {
        // Set 2nd output to zero if conversion fails.
        CRegister cr = cr7;
        int crbit = v8::internal::Assembler::encode_crbit(
            cr, static_cast<CRBit>(VXCVI % CRWIDTH));
        __ mcrfs(cr, VXCVI);  // extract FPSCR field containing VXCVI into cr7
        if (CpuFeatures::IsSupported(ISELECT)) {
          __ li(i.OutputRegister(1), Operand(1));
          __ isel(i.OutputRegister(1), r0, i.OutputRegister(1), crbit);
        } else {
          __ li(i.OutputRegister(1), Operand::Zero());
          __ bc(v8::internal::kInstrSize * 2, BT, crbit);
          __ li(i.OutputRegister(1), Operand(1));
        }
      }
      DCHECK_EQ(LeaveRC, i.OutputRCBit());
      break;
    }
#endif
    case kPPC_DoubleToFloat32:
      ASSEMBLE_FLOAT_UNOP_RC(frsp, 0);
      break;
    case kPPC_Float32ToDouble:
      // Nothing to do.
      __ Move(i.OutputDoubleRegister(), i.InputDoubleRegister(0));
      DCHECK_EQ(LeaveRC, i.OutputRCBit());
      break;
    case kPPC_DoubleExtractLowWord32:
      __ MovDoubleLowToInt(i.OutputRegister(), i.InputDoubleRegister(0));
      DCHECK_EQ(LeaveRC, i.OutputRCBit());
      break;
    case kPPC_DoubleExtractHighWord32:
      __ MovDoubleHighToInt(i.OutputRegister(), i.InputDoubleRegister(0));
      DCHECK_EQ(LeaveRC, i.OutputRCBit());
      break;
    case kPPC_DoubleInsertLowWord32:
      __ InsertDoubleLow(i.OutputDoubleRegister(), i.InputRegister(1), r0);
      DCHECK_EQ(LeaveRC, i.OutputRCBit());
      break;
    case kPPC_DoubleInsertHighWord32:
      __ InsertDoubleHigh(i.OutputDoubleRegister(), i.InputRegister(1), r0);
      DCHECK_EQ(LeaveRC, i.OutputRCBit());
      break;
    case kPPC_DoubleConstruct:
#if V8_TARGET_ARCH_PPC64
      __ MovInt64ComponentsToDouble(i.OutputDoubleRegister(),
                                    i.InputRegister(0), i.InputRegister(1), r0);
#else
      __ MovInt64ToDouble(i.OutputDoubleRegister(), i.InputRegister(0),
                          i.InputRegister(1));
#endif
      DCHECK_EQ(LeaveRC, i.OutputRCBit());
      break;
    case kPPC_BitcastFloat32ToInt32:
      __ MovFloatToInt(i.OutputRegister(), i.InputDoubleRegister(0));
      break;
    case kPPC_BitcastInt32ToFloat32:
      __ MovIntToFloat(i.OutputDoubleRegister(), i.InputRegister(0));
      break;
#if V8_TARGET_ARCH_PPC64
    case kPPC_BitcastDoubleToInt64:
      __ MovDoubleToInt64(i.OutputRegister(), i.InputDoubleRegister(0));
      break;
    case kPPC_BitcastInt64ToDouble:
      __ MovInt64ToDouble(i.OutputDoubleRegister(), i.InputRegister(0));
      break;
#endif
    case kPPC_LoadWordU8:
      ASSEMBLE_LOAD_INTEGER(lbz, lbzx);
      EmitWordLoadPoisoningIfNeeded(this, instr, i);
      break;
    case kPPC_LoadWordS8:
      ASSEMBLE_LOAD_INTEGER(lbz, lbzx);
      __ extsb(i.OutputRegister(), i.OutputRegister());
      EmitWordLoadPoisoningIfNeeded(this, instr, i);
      break;
    case kPPC_LoadWordU16:
      ASSEMBLE_LOAD_INTEGER(lhz, lhzx);
      EmitWordLoadPoisoningIfNeeded(this, instr, i);
      break;
    case kPPC_LoadWordS16:
      ASSEMBLE_LOAD_INTEGER(lha, lhax);
      EmitWordLoadPoisoningIfNeeded(this, instr, i);
      break;
    case kPPC_LoadWordU32:
      ASSEMBLE_LOAD_INTEGER(lwz, lwzx);
      EmitWordLoadPoisoningIfNeeded(this, instr, i);
      break;
    case kPPC_LoadWordS32:
      ASSEMBLE_LOAD_INTEGER(lwa, lwax);
      EmitWordLoadPoisoningIfNeeded(this, instr, i);
      break;
#if V8_TARGET_ARCH_PPC64
    case kPPC_LoadWord64:
      ASSEMBLE_LOAD_INTEGER(ld, ldx);
      EmitWordLoadPoisoningIfNeeded(this, instr, i);
      break;
#endif
    case kPPC_LoadFloat32:
      ASSEMBLE_LOAD_FLOAT(lfs, lfsx);
      break;
    case kPPC_LoadDouble:
      ASSEMBLE_LOAD_FLOAT(lfd, lfdx);
      break;
    case kPPC_StoreWord8:
      ASSEMBLE_STORE_INTEGER(stb, stbx);
      break;
    case kPPC_StoreWord16:
      ASSEMBLE_STORE_INTEGER(sth, sthx);
      break;
    case kPPC_StoreWord32:
      ASSEMBLE_STORE_INTEGER(stw, stwx);
      break;
#if V8_TARGET_ARCH_PPC64
    case kPPC_StoreWord64:
      ASSEMBLE_STORE_INTEGER(std, stdx);
      break;
#endif
    case kPPC_StoreFloat32:
      ASSEMBLE_STORE_FLOAT(stfs, stfsx);
      break;
    case kPPC_StoreDouble:
      ASSEMBLE_STORE_FLOAT(stfd, stfdx);
      break;
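    // Atomic loads and stores are emitted by the instruction selector as the
    // generic load/store opcodes above with an is_atomic operand (checked in
    // ASSEMBLE_LOAD_INTEGER/ASSEMBLE_STORE_INTEGER), so the dedicated opcodes
    // below are unreachable here. The exchange, compare-exchange, and binop
    // helpers expand to lwsync + larx/stcx. retry loops followed by sync,
    // which gives sequentially consistent semantics on PPC.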
    case kWord32AtomicLoadInt8:
    case kPPC_AtomicLoadUint8:
    case kWord32AtomicLoadInt16:
    case kPPC_AtomicLoadUint16:
    case kPPC_AtomicLoadWord32:
    case kPPC_AtomicLoadWord64:
    case kPPC_AtomicStoreUint8:
    case kPPC_AtomicStoreUint16:
    case kPPC_AtomicStoreWord32:
    case kPPC_AtomicStoreWord64:
      UNREACHABLE();
    case kWord32AtomicExchangeInt8:
      ASSEMBLE_ATOMIC_EXCHANGE_INTEGER(lbarx, stbcx);
      __ extsb(i.OutputRegister(0), i.OutputRegister(0));
      break;
    case kPPC_AtomicExchangeUint8:
      ASSEMBLE_ATOMIC_EXCHANGE_INTEGER(lbarx, stbcx);
      break;
    case kWord32AtomicExchangeInt16:
      ASSEMBLE_ATOMIC_EXCHANGE_INTEGER(lharx, sthcx);
      __ extsh(i.OutputRegister(0), i.OutputRegister(0));
      break;
    case kPPC_AtomicExchangeUint16:
      ASSEMBLE_ATOMIC_EXCHANGE_INTEGER(lharx, sthcx);
      break;
    case kPPC_AtomicExchangeWord32:
      ASSEMBLE_ATOMIC_EXCHANGE_INTEGER(lwarx, stwcx);
      break;
    case kPPC_AtomicExchangeWord64:
      ASSEMBLE_ATOMIC_EXCHANGE_INTEGER(ldarx, stdcx);
      break;
    case kWord32AtomicCompareExchangeInt8:
      ASSEMBLE_ATOMIC_COMPARE_EXCHANGE_SIGN_EXT(cmp, lbarx, stbcx, extsb);
      break;
    case kPPC_AtomicCompareExchangeUint8:
      ASSEMBLE_ATOMIC_COMPARE_EXCHANGE(cmp, lbarx, stbcx);
      break;
    case kWord32AtomicCompareExchangeInt16:
      ASSEMBLE_ATOMIC_COMPARE_EXCHANGE_SIGN_EXT(cmp, lharx, sthcx, extsh);
      break;
    case kPPC_AtomicCompareExchangeUint16:
      ASSEMBLE_ATOMIC_COMPARE_EXCHANGE(cmp, lharx, sthcx);
      break;
    case kPPC_AtomicCompareExchangeWord32:
      ASSEMBLE_ATOMIC_COMPARE_EXCHANGE(cmpw, lwarx, stwcx);
      break;
    case kPPC_AtomicCompareExchangeWord64:
      ASSEMBLE_ATOMIC_COMPARE_EXCHANGE(cmp, ldarx, stdcx);
      break;

#define ATOMIC_BINOP_CASE(op, inst)                            \
  case kPPC_Atomic##op##Int8:                                  \
    ASSEMBLE_ATOMIC_BINOP_SIGN_EXT(inst, lbarx, stbcx, extsb); \
    break;                                                     \
  case kPPC_Atomic##op##Uint8:                                 \
    ASSEMBLE_ATOMIC_BINOP(inst, lbarx, stbcx);                 \
    break;                                                     \
  case kPPC_Atomic##op##Int16:                                 \
    ASSEMBLE_ATOMIC_BINOP_SIGN_EXT(inst, lharx, sthcx, extsh); \
    break;                                                     \
  case kPPC_Atomic##op##Uint16:                                \
    ASSEMBLE_ATOMIC_BINOP(inst, lharx, sthcx);                 \
    break;                                                     \
  case kPPC_Atomic##op##Int32:                                 \
    ASSEMBLE_ATOMIC_BINOP_SIGN_EXT(inst, lwarx, stwcx, extsw); \
    break;                                                     \
  case kPPC_Atomic##op##Uint32:                                \
    ASSEMBLE_ATOMIC_BINOP(inst, lwarx, stwcx);                 \
    break;                                                     \
  case kPPC_Atomic##op##Int64:                                 \
  case kPPC_Atomic##op##Uint64:                                \
    ASSEMBLE_ATOMIC_BINOP(inst, ldarx, stdcx);                 \
    break;
      ATOMIC_BINOP_CASE(Add, add)
      ATOMIC_BINOP_CASE(Sub, sub)
      ATOMIC_BINOP_CASE(And, and_)
      ATOMIC_BINOP_CASE(Or, orx)
      ATOMIC_BINOP_CASE(Xor, xor_)
#undef ATOMIC_BINOP_CASE

    case kPPC_ByteRev32: {
      Register input = i.InputRegister(0);
      Register output = i.OutputRegister();
      Register temp1 = r0;
      __ rotlwi(temp1, input, 8);
      __ rlwimi(temp1, input, 24, 0, 7);
      __ rlwimi(temp1, input, 24, 16, 23);
      __ extsw(output, temp1);
      break;
    }
#ifdef V8_TARGET_ARCH_PPC64
    case kPPC_ByteRev64: {
      Register input = i.InputRegister(0);
      Register output = i.OutputRegister();
      Register temp1 = r0;
      Register temp2 = kScratchReg;
      Register temp3 = i.TempRegister(0);
      __ rldicl(temp1, input, 32, 32);
      __ rotlwi(temp2, input, 8);
      __ rlwimi(temp2, input, 24, 0, 7);
      __ rotlwi(temp3, temp1, 8);
      __ rlwimi(temp2, input, 24, 16, 23);
      __ rlwimi(temp3, temp1, 24, 0, 7);
      __ rlwimi(temp3, temp1, 24, 16, 23);
      __ rldicr(temp2, temp2, 32, 31);
      __ orx(output, temp2, temp3);
      break;
    }
#endif  // V8_TARGET_ARCH_PPC64
    default:
      UNREACHABLE();
  }
  return kSuccess;
}

// Assembles branches after an instruction.
void CodeGenerator::AssembleArchBranch(Instruction* instr, BranchInfo* branch) {
  PPCOperandConverter i(this, instr);
  Label* tlabel = branch->true_label;
  Label* flabel = branch->false_label;
  ArchOpcode op = instr->arch_opcode();
  FlagsCondition condition = branch->condition;
  CRegister cr = cr0;

  Condition cond = FlagsConditionToCondition(condition, op);
  if (op == kPPC_CmpDouble) {
    // Check for unordered if necessary.
    if (cond == le) {
      __ bunordered(flabel, cr);
      // Unnecessary for eq/lt since only FU bit will be set.
    } else if (cond == gt) {
      __ bunordered(tlabel, cr);
      // Unnecessary for ne/ge since only FU bit will be set.
    }
  }
  __ b(cond, tlabel, cr);
  if (!branch->fallthru) __ b(flabel);  // no fallthru to flabel.
}
void CodeGenerator::AssembleBranchPoisoning(FlagsCondition condition,
                                            Instruction* instr) {
  if (condition == kUnorderedEqual || condition == kUnorderedNotEqual ||
      condition == kOverflow || condition == kNotOverflow) {
    return;
  }

  ArchOpcode op = instr->arch_opcode();
  condition = NegateFlagsCondition(condition);
  __ li(kScratchReg, Operand::Zero());
  __ isel(FlagsConditionToCondition(condition, op), kSpeculationPoisonRegister,
          kScratchReg, kSpeculationPoisonRegister, cr0);
}
void CodeGenerator::AssembleArchDeoptBranch(Instruction* instr,
                                            BranchInfo* branch) {
  AssembleArchBranch(instr, branch);
}
void CodeGenerator::AssembleArchJump(RpoNumber target) {
  if (!IsNextInAssemblyOrder(target)) __ b(GetLabel(target));
}
void CodeGenerator::AssembleArchTrap(Instruction* instr,
                                     FlagsCondition condition) {
  class OutOfLineTrap final : public OutOfLineCode {
   public:
    OutOfLineTrap(CodeGenerator* gen, Instruction* instr)
        : OutOfLineCode(gen), instr_(instr), gen_(gen) {}

    void Generate() final {
      PPCOperandConverter i(gen_, instr_);
      TrapId trap_id =
          static_cast<TrapId>(i.InputInt32(instr_->InputCount() - 1));
      GenerateCallToTrap(trap_id);
    }

   private:
    void GenerateCallToTrap(TrapId trap_id) {
      if (trap_id == TrapId::kInvalid) {
        // The trap callback is only used in tests; call it as a C function
        // instead of going through the runtime. cp serves as the scratch
        // register since there is no context here.
        __ PrepareCallCFunction(0, 0, cp);
        __ CallCFunction(
            ExternalReference::wasm_call_trap_callback_for_testing(), 0);
        __ LeaveFrame(StackFrame::WASM_COMPILED);
        auto call_descriptor = gen_->linkage()->GetIncomingDescriptor();
        int pop_count =
            static_cast<int>(call_descriptor->StackParameterCount());
        __ Drop(pop_count);
        __ Ret();
      } else {
        gen_->AssembleSourcePosition(instr_);
        // A direct call to a wasm runtime stub defined in this module: just
        // encode the stub index. This is patched when the code is added to
        // the native module and copied into wasm code space.
        __ Call(static_cast<Address>(trap_id), RelocInfo::WASM_STUB_CALL);
        ReferenceMap* reference_map =
            new (gen_->zone()) ReferenceMap(gen_->zone());
        gen_->RecordSafepoint(reference_map, Safepoint::kSimple, 0,
                              Safepoint::kNoLazyDeopt);
        if (FLAG_debug_code) {
          __ stop(GetAbortReason(AbortReason::kUnexpectedReturnFromWasmTrap));
        }
      }
    }

    Instruction* instr_;
    CodeGenerator* gen_;
  };
  auto ool = new (zone()) OutOfLineTrap(this, instr);
  Label* tlabel = ool->entry();
  Label end;

  ArchOpcode op = instr->arch_opcode();
  CRegister cr = cr0;
  Condition cond = FlagsConditionToCondition(condition, op);
  if (op == kPPC_CmpDouble) {
    // Check for unordered if necessary.
    if (cond == le) {
      __ bunordered(&end, cr);
    } else if (cond == gt) {
      __ bunordered(tlabel, cr);
    }
  }
  __ b(cond, tlabel, cr);
  __ bind(&end);
}

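// Materialize a flags condition as a boolean (0 or 1) in the instruction's
// last output register. With ISELECT support this is branch-free; otherwise
// a short branch skips the "li reg, 1".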
void CodeGenerator::AssembleArchBoolean(Instruction* instr,
                                        FlagsCondition condition) {
  PPCOperandConverter i(this, instr);
  Label done;
  ArchOpcode op = instr->arch_opcode();
  CRegister cr = cr0;
  int reg_value = -1;

  // Materialize a full 32-bit 1 or 0 value. The result register is always
  // the last output of the instruction.
  DCHECK_NE(0u, instr->OutputCount());
  Register reg = i.OutputRegister(instr->OutputCount() - 1);

  Condition cond = FlagsConditionToCondition(condition, op);
  if (op == kPPC_CmpDouble) {
    // Check for unordered if necessary.
    if (cond == le) {
      reg_value = 0;
      __ li(reg, Operand::Zero());
      __ bunordered(&done, cr);
    } else if (cond == gt) {
      reg_value = 1;
      __ li(reg, Operand(1));
      __ bunordered(&done, cr);
    }
    // Unnecessary for eq/lt & ne/ge since only the FU bit will be set.
  }

  if (CpuFeatures::IsSupported(ISELECT)) {
    switch (cond) {
      case eq:
      case lt:
      case gt:
        if (reg_value != 1) __ li(reg, Operand(1));
        __ li(kScratchReg, Operand::Zero());
        __ isel(cond, reg, reg, kScratchReg, cr);
        break;
      case ne:
      case ge:
      case le:
        if (reg_value != 1) __ li(reg, Operand(1));
        // r0 implies logical zero in this form.
        __ isel(NegateCondition(cond), reg, r0, reg, cr);
        break;
      default:
        UNREACHABLE();
        break;
    }
  } else {
    if (reg_value != 0) __ li(reg, Operand::Zero());
    __ b(NegateCondition(cond), &done, cr);
    __ li(reg, Operand(1));
  }
  __ bind(&done);
}

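// Lower a switch via binary search over the (value, target) pairs encoded in
// the instruction's inputs; input 1 names the default block.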
void CodeGenerator::AssembleArchBinarySearchSwitch(Instruction* instr) {
  PPCOperandConverter i(this, instr);
  Register input = i.InputRegister(0);
  std::vector<std::pair<int32_t, Label*>> cases;
  for (size_t index = 2; index < instr->InputCount(); index += 2) {
    cases.push_back({i.InputInt32(index + 0), GetLabel(i.InputRpo(index + 1))});
  }
  AssembleArchBinarySearchSwitchRange(input, i.InputRpo(1), cases.data(),
                                      cases.data() + cases.size());
}

void CodeGenerator::AssembleArchLookupSwitch(Instruction* instr) {
  PPCOperandConverter i(this, instr);
  Register input = i.InputRegister(0);
  for (size_t index = 2; index < instr->InputCount(); index += 2) {
    __ Cmpwi(input, Operand(i.InputInt32(index + 0)), r0);
    __ beq(GetLabel(i.InputRpo(index + 1)));
  }
  AssembleArchJump(i.InputRpo(1));
}

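// Lower a dense switch to a jump table: after a bounds check against the
// case count, the target address is loaded from a label-addressed table
// indexed by the input value.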
void CodeGenerator::AssembleArchTableSwitch(Instruction* instr) {
  PPCOperandConverter i(this, instr);
  Register input = i.InputRegister(0);
  int32_t const case_count = static_cast<int32_t>(instr->InputCount() - 2);
  Label** cases = zone()->NewArray<Label*>(case_count);
  for (int32_t index = 0; index < case_count; ++index) {
    cases[index] = GetLabel(i.InputRpo(index + 2));
  }
  Label* const table = AddJumpTable(cases, case_count);
  __ Cmpli(input, Operand(case_count), r0);
  __ bge(GetLabel(i.InputRpo(1)));
  __ mov_label_addr(kScratchReg, table);
  __ ShiftLeftImm(r0, input, Operand(kPointerSizeLog2));
  __ LoadPX(kScratchReg, MemOperand(kScratchReg, r0));
  __ Jump(kScratchReg);
}

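// Reserve frame slots for the callee-saved double and general-purpose
// registers that AssembleConstructFrame will push.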
void CodeGenerator::FinishFrame(Frame* frame) {
  auto call_descriptor = linkage()->GetIncomingDescriptor();
  const RegList double_saves = call_descriptor->CalleeSavedFPRegisters();

  // Save callee-saved double registers.
  if (double_saves != 0) {
    frame->AlignSavedCalleeRegisterSlots();
    DCHECK_EQ(kNumCalleeSavedDoubles,
              base::bits::CountPopulation(double_saves));
    frame->AllocateSavedCalleeRegisterSlots(kNumCalleeSavedDoubles *
                                            (kDoubleSize / kPointerSize));
  }
  // Save callee-saved registers.
  const RegList saves = FLAG_enable_embedded_constant_pool
                            ? call_descriptor->CalleeSavedRegisters() &
                                  ~kConstantPoolRegister.bit()
                            : call_descriptor->CalleeSavedRegisters();
  if (saves != 0) {
    // The register save area does not include the fp or constant pool
    // pointer.
    const int num_saves =
        kNumCalleeSaved - 1 - (FLAG_enable_embedded_constant_pool ? 1 : 0);
    DCHECK(num_saves == base::bits::CountPopulation(saves));
    frame->AllocateSavedCalleeRegisterSlots(num_saves);
  }
}

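// Build the stack frame. The prologue shape depends on the call descriptor:
// C calls save lr/fp (plus the constant pool pointer when enabled), JS calls
// use the standard prologue, and all other frames get a stub prologue.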
void CodeGenerator::AssembleConstructFrame() {
  auto call_descriptor = linkage()->GetIncomingDescriptor();
  if (frame_access_state()->has_frame()) {
    if (call_descriptor->IsCFunctionCall()) {
      __ function_descriptor();
      __ mflr(r0);
      if (FLAG_enable_embedded_constant_pool) {
        __ Push(r0, fp, kConstantPoolRegister);
        // Adjust FP to point to saved FP.
        __ subi(fp, sp, Operand(StandardFrameConstants::kConstantPoolOffset));
      } else {
        __ Push(r0, fp);
        __ mr(fp, sp);
      }
    } else if (call_descriptor->IsJSFunctionCall()) {
      __ Prologue();
      if (call_descriptor->PushArgumentCount()) {
        __ Push(kJavaScriptCallArgCountRegister);
      }
    } else {
      StackFrame::Type type = info()->GetOutputStackFrameType();
      __ StubPrologue(type);
      if (call_descriptor->IsWasmFunctionCall()) {
        __ Push(kWasmInstanceRegister);
      } else if (call_descriptor->IsWasmImportWrapper()) {
        // Wasm import wrappers are passed a tuple in the place of the
        // instance. Unpack the tuple into the instance and the target
        // callable here; this cannot be expressed properly in the graph.
        __ LoadP(kJSFunctionRegister,
                 FieldMemOperand(kWasmInstanceRegister, Tuple2::kValue2Offset));
        __ LoadP(kWasmInstanceRegister,
                 FieldMemOperand(kWasmInstanceRegister, Tuple2::kValue1Offset));
        __ Push(kWasmInstanceRegister);
      }
    }
  }

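  // Compute how many slots the frame needs beyond the fixed frame size. For
  // big wasm frames, the stack-overflow check below must run before the frame
  // is constructed; afterwards there might not be enough headroom left to
  // call the runtime.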
  int shrink_slots = frame()->GetTotalFrameSlotCount() -
                     call_descriptor->CalculateFixedFrameSize();
  if (info()->is_osr()) {
    // TurboFan OSR-compiled functions cannot be entered directly.
    __ Abort(AbortReason::kShouldNotDirectlyEnterOsrFunction);

    // Unoptimized code jumps directly to this entrypoint while the
    // unoptimized frame is still on the stack. Optimized code uses OSR values
    // directly from the unoptimized frame, so all that needs to be done is to
    // allocate the remaining stack slots.
    if (FLAG_code_comments) __ RecordComment("-- OSR entrypoint --");
    osr_pc_offset_ = __ pc_offset();
    shrink_slots -= osr_helper()->UnoptimizedFrameSlots();
    ResetSpeculationPoison();
  }

  const RegList saves_fp = call_descriptor->CalleeSavedFPRegisters();
  const RegList saves = FLAG_enable_embedded_constant_pool
                            ? call_descriptor->CalleeSavedRegisters() &
                                  ~kConstantPoolRegister.bit()
                            : call_descriptor->CalleeSavedRegisters();

  if (shrink_slots > 0) {
    if (info()->IsWasm() && shrink_slots > 128) {
      Label done;

      // If the frame is bigger than the stack, throw the stack overflow
      // exception unconditionally; this avoids an integer-overflow check in
      // the comparison below.
      if ((shrink_slots * kPointerSize) < (FLAG_stack_size * 1024)) {
        Register scratch = ip;
        __ LoadP(
            scratch,
            FieldMemOperand(kWasmInstanceRegister,
                            WasmInstanceObject::kRealStackLimitAddressOffset));
        __ LoadP(scratch, MemOperand(scratch), r0);
        __ Add(scratch, scratch, shrink_slots * kPointerSize, r0);
        __ cmpl(sp, scratch);
        __ bge(&done);
      }

      __ LoadP(r5,
               FieldMemOperand(kWasmInstanceRegister,
                               WasmInstanceObject::kCEntryStubOffset),
               r0);
      __ Move(cp, Smi::zero());
      __ CallRuntimeWithCEntry(Runtime::kThrowWasmStackOverflow, r5);
      // We come from WebAssembly; there are no references for the GC.
      ReferenceMap* reference_map = new (zone()) ReferenceMap(zone());
      RecordSafepoint(reference_map, Safepoint::kSimple, 0,
                      Safepoint::kNoLazyDeopt);
      if (FLAG_debug_code) {
        __ stop(GetAbortReason(AbortReason::kUnexpectedReturnFromThrow));
      }

      __ bind(&done);
    }

    // Skip the callee-saved and return slots, which are created below.
    shrink_slots -= base::bits::CountPopulation(saves);
    shrink_slots -= frame()->GetReturnSlotCount();
    shrink_slots -=
        (kDoubleSize / kPointerSize) * base::bits::CountPopulation(saves_fp);
    __ Add(sp, sp, -shrink_slots * kPointerSize, r0);
  }

  // Save callee-saved double registers.
  if (saves_fp != 0) {
    __ MultiPushDoubles(saves_fp);
    DCHECK_EQ(kNumCalleeSavedDoubles, base::bits::CountPopulation(saves_fp));
  }

  // Save callee-saved registers.
  if (saves != 0) {
    __ MultiPush(saves);
  }

  const int returns = frame()->GetReturnSlotCount();
  if (returns != 0) {
    // Create space for returns.
    __ Add(sp, sp, -returns * kPointerSize, r0);
  }
}

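// Tear down the frame and return, popping the incoming stack parameters plus
// any extra pop count carried by the return instruction.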
void CodeGenerator::AssembleReturn(InstructionOperand* pop) {
  auto call_descriptor = linkage()->GetIncomingDescriptor();
  int pop_count = static_cast<int>(call_descriptor->StackParameterCount());

  const int returns = frame()->GetReturnSlotCount();
  if (returns != 0) {
    // Free the space allocated for returns.
    __ Add(sp, sp, returns * kPointerSize, r0);
  }

  // Restore registers.
  const RegList saves = FLAG_enable_embedded_constant_pool
                            ? call_descriptor->CalleeSavedRegisters() &
                                  ~kConstantPoolRegister.bit()
                            : call_descriptor->CalleeSavedRegisters();
  if (saves != 0) {
    __ MultiPop(saves);
  }

  // Restore double registers.
  const RegList double_saves = call_descriptor->CalleeSavedFPRegisters();
  if (double_saves != 0) {
    __ MultiPopDoubles(double_saves);
  }
  PPCOperandConverter g(this, nullptr);

  if (call_descriptor->IsCFunctionCall()) {
    AssembleDeconstructFrame();
  } else if (frame_access_state()->has_frame()) {
    // Canonicalize JSFunction return sites for now, unless they have a
    // variable number of stack slot pops.
    if (pop->IsImmediate() && g.ToConstant(pop).ToInt32() == 0) {
      if (return_label_.is_bound()) {
        __ b(&return_label_);
        return;
      } else {
        __ bind(&return_label_);
        AssembleDeconstructFrame();
      }
    } else {
      AssembleDeconstructFrame();
    }
  }
  // The constant pool is unavailable since the frame has been destructed.
  ConstantPoolUnavailableScope constant_pool_unavailable(tasm());
  if (pop->IsImmediate()) {
    DCHECK(Constant::kInt32 == g.ToConstant(pop).type() ||
           Constant::kInt64 == g.ToConstant(pop).type());
    pop_count += g.ToConstant(pop).ToInt32();
  } else {
    __ Drop(g.ToRegister(pop));
  }
  __ Ret(pop_count);
}

void CodeGenerator::FinishCode() { __ EmitConstantPool(); }

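// Copy a value between two instruction operands, using only the scratch
// registers (r0, kScratchReg, kScratchDoubleReg) as temporaries so that no
// allocatable register is clobbered.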
void CodeGenerator::AssembleMove(InstructionOperand* source,
                                 InstructionOperand* destination) {
  PPCOperandConverter g(this, nullptr);
  // Dispatch on the source and destination operand kinds. Not all
  // combinations are possible.
  if (source->IsRegister()) {
    DCHECK(destination->IsRegister() || destination->IsStackSlot());
    Register src = g.ToRegister(source);
    if (destination->IsRegister()) {
      __ Move(g.ToRegister(destination), src);
    } else {
      __ StoreP(src, g.ToMemOperand(destination), r0);
    }
  } else if (source->IsStackSlot()) {
    DCHECK(destination->IsRegister() || destination->IsStackSlot());
    MemOperand src = g.ToMemOperand(source);
    if (destination->IsRegister()) {
      __ LoadP(g.ToRegister(destination), src, r0);
    } else {
      Register temp = kScratchReg;
      __ LoadP(temp, src, r0);
      __ StoreP(temp, g.ToMemOperand(destination), r0);
    }
  } else if (source->IsConstant()) {
    Constant src = g.ToConstant(source);
    if (destination->IsRegister() || destination->IsStackSlot()) {
      Register dst =
          destination->IsRegister() ? g.ToRegister(destination) : kScratchReg;
      switch (src.type()) {
        case Constant::kInt32:
#if V8_TARGET_ARCH_PPC64
          if (false) {
#else
          if (RelocInfo::IsWasmReference(src.rmode())) {
#endif
            __ mov(dst, Operand(src.ToInt32(), src.rmode()));
          } else {
            __ mov(dst, Operand(src.ToInt32()));
          }
          break;
        case Constant::kInt64:
#if V8_TARGET_ARCH_PPC64
          if (RelocInfo::IsWasmReference(src.rmode())) {
            __ mov(dst, Operand(src.ToInt64(), src.rmode()));
          } else {
#endif
            __ mov(dst, Operand(src.ToInt64()));
#if V8_TARGET_ARCH_PPC64
          }
#endif
          break;
        case Constant::kFloat32:
          __ mov(dst, Operand::EmbeddedNumber(src.ToFloat32()));
          break;
        case Constant::kFloat64:
          __ mov(dst, Operand::EmbeddedNumber(src.ToFloat64().value()));
          break;
        case Constant::kExternalReference:
          __ Move(dst, src.ToExternalReference());
          break;
        case Constant::kDelayedStringConstant:
          __ mov(dst, Operand::EmbeddedStringConstant(
                          src.ToDelayedStringConstant()));
          break;
        case Constant::kHeapObject: {
          Handle<HeapObject> src_object = src.ToHeapObject();
          RootIndex index;
          if (IsMaterializableFromRoot(src_object, &index)) {
            __ LoadRoot(dst, index);
          } else {
            __ Move(dst, src_object);
          }
          break;
        }
        case Constant::kRpoNumber:
          UNREACHABLE();  // Loading RPO numbers is not supported on PPC.
          break;
      }
      if (destination->IsStackSlot()) {
        __ StoreP(dst, g.ToMemOperand(destination), r0);
      }
    } else {
      DoubleRegister dst = destination->IsFPRegister()
                               ? g.ToDoubleRegister(destination)
                               : kScratchDoubleReg;
      Double value;
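      // On ia32/x64 hosts, converting a single-precision signaling NaN to
      // double precision quietens it, so for NaN/infinity bit patterns the
      // double is assembled manually from the float32 bits.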
#if V8_HOST_ARCH_IA32 || V8_HOST_ARCH_X64
      if (src.type() == Constant::kFloat32) {
        uint32_t val = src.ToFloat32AsInt();
        if ((val & 0x7F800000) == 0x7F800000) {
          uint64_t dval = static_cast<uint64_t>(val);
          dval = ((dval & 0xC0000000) << 32) | ((dval & 0x40000000) << 31) |
                 ((dval & 0x40000000) << 30) | ((dval & 0x7FFFFFFF) << 29);
          value = Double(dval);
        } else {
          value = Double(static_cast<double>(src.ToFloat32()));
        }
      } else {
        value = Double(src.ToFloat64());
      }
#else
      value = src.type() == Constant::kFloat32
                  ? Double(static_cast<double>(src.ToFloat32()))
                  : Double(src.ToFloat64());
#endif
      __ LoadDoubleLiteral(dst, value, kScratchReg);
      if (destination->IsDoubleStackSlot()) {
        __ StoreDouble(dst, g.ToMemOperand(destination), r0);
      } else if (destination->IsFloatStackSlot()) {
        __ StoreSingle(dst, g.ToMemOperand(destination), r0);
      }
    }
  } else if (source->IsFPRegister()) {
    DoubleRegister src = g.ToDoubleRegister(source);
    if (destination->IsFPRegister()) {
      DoubleRegister dst = g.ToDoubleRegister(destination);
      __ Move(dst, src);
    } else {
      DCHECK(destination->IsFPStackSlot());
      LocationOperand* op = LocationOperand::cast(source);
      if (op->representation() == MachineRepresentation::kFloat64) {
        __ StoreDouble(src, g.ToMemOperand(destination), r0);
      } else {
        __ StoreSingle(src, g.ToMemOperand(destination), r0);
      }
    }
  } else if (source->IsFPStackSlot()) {
    DCHECK(destination->IsFPRegister() || destination->IsFPStackSlot());
    MemOperand src = g.ToMemOperand(source);
    if (destination->IsFPRegister()) {
      LocationOperand* op = LocationOperand::cast(source);
      if (op->representation() == MachineRepresentation::kFloat64) {
        __ LoadDouble(g.ToDoubleRegister(destination), src, r0);
      } else {
        __ LoadSingle(g.ToDoubleRegister(destination), src, r0);
      }
    } else {
      LocationOperand* op = LocationOperand::cast(source);
      DoubleRegister temp = kScratchDoubleReg;
      if (op->representation() == MachineRepresentation::kFloat64) {
        __ LoadDouble(temp, src, r0);
        __ StoreDouble(temp, g.ToMemOperand(destination), r0);
      } else {
        __ LoadSingle(temp, src, r0);
        __ StoreSingle(temp, g.ToMemOperand(destination), r0);
      }
    }
  } else {
    UNREACHABLE();
  }
}

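// Exchange the contents of two operands in place, going through the scratch
// register (or scratch double register) so no allocatable register is
// clobbered.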
void CodeGenerator::AssembleSwap(InstructionOperand* source,
                                 InstructionOperand* destination) {
  PPCOperandConverter g(this, nullptr);
  if (source->IsRegister()) {
    Register src = g.ToRegister(source);
    if (destination->IsRegister()) {
      __ SwapP(src, g.ToRegister(destination), kScratchReg);
    } else {
      DCHECK(destination->IsStackSlot());
      __ SwapP(src, g.ToMemOperand(destination), kScratchReg);
    }
  } else if (source->IsStackSlot()) {
    DCHECK(destination->IsStackSlot());
    __ SwapP(g.ToMemOperand(source), g.ToMemOperand(destination), kScratchReg,
             r0);
  } else if (source->IsFloatRegister()) {
    DoubleRegister src = g.ToDoubleRegister(source);
    if (destination->IsFloatRegister()) {
      __ SwapFloat32(src, g.ToDoubleRegister(destination), kScratchDoubleReg);
    } else {
      DCHECK(destination->IsFloatStackSlot());
      __ SwapFloat32(src, g.ToMemOperand(destination), kScratchDoubleReg);
    }
  } else if (source->IsDoubleRegister()) {
    DoubleRegister src = g.ToDoubleRegister(source);
    if (destination->IsDoubleRegister()) {
      __ SwapDouble(src, g.ToDoubleRegister(destination), kScratchDoubleReg);
    } else {
      DCHECK(destination->IsDoubleStackSlot());
      __ SwapDouble(src, g.ToMemOperand(destination), kScratchDoubleReg);
    }
  } else if (source->IsFloatStackSlot()) {
    DCHECK(destination->IsFloatStackSlot());
    __ SwapFloat32(g.ToMemOperand(source), g.ToMemOperand(destination),
                   kScratchDoubleReg, d0);
  } else if (source->IsDoubleStackSlot()) {
    DCHECK(destination->IsDoubleStackSlot());
    __ SwapDouble(g.ToMemOperand(source), g.ToMemOperand(destination),
                  kScratchDoubleReg, d0);
  } else if (source->IsSimd128Register()) {
    UNREACHABLE();
  } else {
    UNREACHABLE();
  }
}

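// Emit the body of a previously requested jump table: one label address per
// entry, consumed by the indexed load in AssembleArchTableSwitch.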
void CodeGenerator::AssembleJumpTable(Label** targets, size_t target_count) {
  for (size_t index = 0; index < target_count; ++index) {
    __ emit_label_addr(targets[index]);
  }
}