#ifndef V8_WASM_BASELINE_MIPS64_LIFTOFF_ASSEMBLER_MIPS64_H_
#define V8_WASM_BASELINE_MIPS64_LIFTOFF_ASSEMBLER_MIPS64_H_

#include "src/wasm/baseline/liftoff-assembler.h"

#define BAILOUT(reason) bailout("mips64 " reason)

namespace v8 {
namespace internal {
namespace wasm {

namespace liftoff {

// fp-8 holds the stack marker, fp-16 the instance parameter; the first stack
// slot is located at fp-24.
constexpr int32_t kConstantStackSpace = 16;
constexpr int32_t kFirstStackSlotOffset =
    kConstantStackSpace + LiftoffAssembler::kStackSlotSize;

inline MemOperand GetStackSlot(uint32_t index) {
  int32_t offset = index * LiftoffAssembler::kStackSlotSize;
  return MemOperand(fp, -kFirstStackSlotOffset - offset);
}

inline MemOperand GetInstanceOperand() { return MemOperand(fp, -16); }

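// Dispatches on the wasm value type to pick the matching MIPS64 load
// instruction (lw/ld for integers, lwc1/Ldc1 for floats).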
inline void Load(LiftoffAssembler* assm, LiftoffRegister dst, MemOperand src,
                 ValueType type) {
  switch (type) {
    case kWasmI32:
      assm->lw(dst.gp(), src);
      break;
    case kWasmI64:
      assm->ld(dst.gp(), src);
      break;
    case kWasmF32:
      assm->lwc1(dst.fp(), src);
      break;
    case kWasmF64:
      assm->Ldc1(dst.fp(), src);
      break;
    default:
      UNREACHABLE();
  }
}

inline void Store(LiftoffAssembler* assm, Register base, int32_t offset,
                  LiftoffRegister src, ValueType type) {
  MemOperand dst(base, offset);
  switch (type) {
    case kWasmI32:
      assm->Usw(src.gp(), dst);
      break;
    case kWasmI64:
      assm->Usd(src.gp(), dst);
      break;
    case kWasmF32:
      assm->Uswc1(src.fp(), dst, t8);
      break;
    case kWasmF64:
      assm->Usdc1(src.fp(), dst, t8);
      break;
    default:
      UNREACHABLE();
  }
}
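
// Pushes a register onto the runtime stack, widening every value to one
// pointer-sized slot (f64 keeps its full double width).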
inline void push(LiftoffAssembler* assm, LiftoffRegister reg, ValueType type) {
  switch (type) {
    case kWasmI32:
      assm->daddiu(sp, sp, -kPointerSize);
      assm->sw(reg.gp(), MemOperand(sp, 0));
      break;
    case kWasmI64:
      assm->push(reg.gp());
      break;
    case kWasmF32:
      assm->daddiu(sp, sp, -kPointerSize);
      assm->swc1(reg.fp(), MemOperand(sp, 0));
      break;
    case kWasmF64:
      assm->daddiu(sp, sp, -kPointerSize);
      assm->Sdc1(reg.fp(), MemOperand(sp, 0));
      break;
    default:
      UNREACHABLE();
  }
}
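
// Wasm memory is little-endian, so on big-endian targets every memory load
// and store byte-swaps the value; floats are first reinterpreted as integers,
// swapped in a gp register, and then reinterpreted back.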
#if defined(V8_TARGET_BIG_ENDIAN)
inline void ChangeEndiannessLoad(LiftoffAssembler* assm, LiftoffRegister dst,
                                 LoadType type, LiftoffRegList pinned) {
  bool is_float = false;
  LiftoffRegister tmp = dst;
  switch (type.value()) {
    case LoadType::kI64Load8U:
    case LoadType::kI64Load8S:
    case LoadType::kI32Load8U:
    case LoadType::kI32Load8S:
      // No need to change endianness for byte-sized loads.
      return;
    case LoadType::kF32Load:
      is_float = true;
      tmp = assm->GetUnusedRegister(kGpReg, pinned);
      assm->emit_type_conversion(kExprI32ReinterpretF32, tmp, dst);
      V8_FALLTHROUGH;
    case LoadType::kI64Load32U:
      assm->TurboAssembler::ByteSwapUnsigned(tmp.gp(), tmp.gp(), 4);
      break;
    case LoadType::kI32Load:
    case LoadType::kI64Load32S:
      assm->TurboAssembler::ByteSwapSigned(tmp.gp(), tmp.gp(), 4);
      break;
    case LoadType::kI32Load16S:
    case LoadType::kI64Load16S:
      assm->TurboAssembler::ByteSwapSigned(tmp.gp(), tmp.gp(), 2);
      break;
    case LoadType::kI32Load16U:
    case LoadType::kI64Load16U:
      assm->TurboAssembler::ByteSwapUnsigned(tmp.gp(), tmp.gp(), 2);
      break;
    case LoadType::kF64Load:
      is_float = true;
      tmp = assm->GetUnusedRegister(kGpReg, pinned);
      assm->emit_type_conversion(kExprI64ReinterpretF64, tmp, dst);
      V8_FALLTHROUGH;
    case LoadType::kI64Load:
      assm->TurboAssembler::ByteSwapSigned(tmp.gp(), tmp.gp(), 8);
      break;
    default:
      UNREACHABLE();
  }

  if (is_float) {
    switch (type.value()) {
      case LoadType::kF32Load:
        assm->emit_type_conversion(kExprF32ReinterpretI32, dst, tmp);
        break;
      case LoadType::kF64Load:
        assm->emit_type_conversion(kExprF64ReinterpretI64, dst, tmp);
        break;
      default:
        UNREACHABLE();
    }
  }
}

inline void ChangeEndiannessStore(LiftoffAssembler* assm, LiftoffRegister src,
                                  StoreType type, LiftoffRegList pinned) {
  bool is_float = false;
  LiftoffRegister tmp = src;
  switch (type.value()) {
    case StoreType::kI64Store8:
    case StoreType::kI32Store8:
      // No need to change endianness for byte-sized stores.
      return;
    case StoreType::kF32Store:
      is_float = true;
      tmp = assm->GetUnusedRegister(kGpReg, pinned);
      assm->emit_type_conversion(kExprI32ReinterpretF32, tmp, src);
      V8_FALLTHROUGH;
    case StoreType::kI32Store:
      assm->TurboAssembler::ByteSwapSigned(tmp.gp(), tmp.gp(), 4);
      break;
    case StoreType::kI32Store16:
      assm->TurboAssembler::ByteSwapSigned(tmp.gp(), tmp.gp(), 2);
      break;
    case StoreType::kF64Store:
      is_float = true;
      tmp = assm->GetUnusedRegister(kGpReg, pinned);
      assm->emit_type_conversion(kExprI64ReinterpretF64, tmp, src);
      V8_FALLTHROUGH;
    case StoreType::kI64Store:
      assm->TurboAssembler::ByteSwapSigned(tmp.gp(), tmp.gp(), 8);
      break;
    case StoreType::kI64Store32:
      assm->TurboAssembler::ByteSwapSigned(tmp.gp(), tmp.gp(), 4);
      break;
    case StoreType::kI64Store16:
      assm->TurboAssembler::ByteSwapSigned(tmp.gp(), tmp.gp(), 2);
      break;
    default:
      UNREACHABLE();
  }

  if (is_float) {
    switch (type.value()) {
      case StoreType::kF32Store:
        assm->emit_type_conversion(kExprF32ReinterpretI32, src, tmp);
        break;
      case StoreType::kF64Store:
        assm->emit_type_conversion(kExprF64ReinterpretI64, src, tmp);
        break;
      default:
        UNREACHABLE();
    }
  }
}

#endif  // V8_TARGET_BIG_ENDIAN

}  // namespace liftoff

int LiftoffAssembler::PrepareStackFrame() {
  int offset = pc_offset();
  // When the constant representing the frame size does not fit a 16-bit
  // immediate, three instructions are needed to add it to sp, so reserve
  // space for that case.
  daddiu(sp, sp, 0);
  nop();
  nop();
  return offset;
}

void LiftoffAssembler::PatchPrepareStackFrame(int offset,
                                              uint32_t stack_slots) {
  uint64_t bytes = liftoff::kConstantStackSpace + kStackSlotSize * stack_slots;
  DCHECK_LE(bytes, kMaxInt);
  // We can't run out of space: just pass anything big enough to not cause the
  // assembler to try to grow the buffer.
  constexpr int kAvailableSpace = 256;
  TurboAssembler patching_assembler(nullptr, AssemblerOptions{},
                                    buffer_ + offset, kAvailableSpace,
                                    CodeObjectRequired::kNo);
  // If {bytes} fits a 16-bit immediate, a single daddiu is patched in and the
  // two reserved nops stay untouched; otherwise a lui/ori sequence loads the
  // constant and a daddu applies it.
  patching_assembler.Daddu(sp, sp, Operand(-bytes));
}

void LiftoffAssembler::FinishCode() {}

void LiftoffAssembler::AbortCompilation() {}

void LiftoffAssembler::LoadConstant(LiftoffRegister reg, WasmValue value,
                                    RelocInfo::Mode rmode) {
  switch (value.type()) {
    case kWasmI32:
      TurboAssembler::li(reg.gp(), Operand(value.to_i32(), rmode));
      break;
    case kWasmI64:
      TurboAssembler::li(reg.gp(), Operand(value.to_i64(), rmode));
      break;
    case kWasmF32:
      TurboAssembler::Move(reg.fp(), value.to_f32_boxed().get_bits());
      break;
    case kWasmF64:
      TurboAssembler::Move(reg.fp(), value.to_f64_boxed().get_bits());
      break;
    default:
      UNREACHABLE();
  }
}

void LiftoffAssembler::LoadFromInstance(Register dst, uint32_t offset,
                                        int size) {
  DCHECK_LE(offset, kMaxInt);
  ld(dst, liftoff::GetInstanceOperand());
  DCHECK(size == 4 || size == 8);
  if (size == 4) {
    lw(dst, MemOperand(dst, offset));
  } else {
    ld(dst, MemOperand(dst, offset));
  }
}

void LiftoffAssembler::SpillInstance(Register instance) {
  sd(instance, liftoff::GetInstanceOperand());
}

void LiftoffAssembler::FillInstanceInto(Register dst) {
  ld(dst, liftoff::GetInstanceOperand());
}
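
// The U-prefixed TurboAssembler macros used below (Ulw, Uld, Ulwc1, ...)
// tolerate unaligned addresses, which wasm memory accesses may produce.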
void LiftoffAssembler::Load(LiftoffRegister dst, Register src_addr,
                            Register offset_reg, uint32_t offset_imm,
                            LoadType type, LiftoffRegList pinned,
                            uint32_t* protected_load_pc, bool is_load_mem) {
  Register src = no_reg;
  if (offset_reg != no_reg) {
    src = GetUnusedRegister(kGpReg, pinned).gp();
    emit_ptrsize_add(src, src_addr, offset_reg);
  }
  MemOperand src_op = (offset_reg != no_reg) ? MemOperand(src, offset_imm)
                                             : MemOperand(src_addr, offset_imm);

  if (protected_load_pc) *protected_load_pc = pc_offset();
  switch (type.value()) {
    case LoadType::kI32Load8U:
    case LoadType::kI64Load8U:
      lbu(dst.gp(), src_op);
      break;
    case LoadType::kI32Load8S:
    case LoadType::kI64Load8S:
      lb(dst.gp(), src_op);
      break;
    case LoadType::kI32Load16U:
    case LoadType::kI64Load16U:
      TurboAssembler::Ulhu(dst.gp(), src_op);
      break;
    case LoadType::kI32Load16S:
    case LoadType::kI64Load16S:
      TurboAssembler::Ulh(dst.gp(), src_op);
      break;
    case LoadType::kI64Load32U:
      TurboAssembler::Ulwu(dst.gp(), src_op);
      break;
    case LoadType::kI32Load:
    case LoadType::kI64Load32S:
      TurboAssembler::Ulw(dst.gp(), src_op);
      break;
    case LoadType::kI64Load:
      TurboAssembler::Uld(dst.gp(), src_op);
      break;
    case LoadType::kF32Load:
      TurboAssembler::Ulwc1(dst.fp(), src_op, t8);
      break;
    case LoadType::kF64Load:
      TurboAssembler::Uldc1(dst.fp(), src_op, t8);
      break;
    default:
      UNREACHABLE();
  }

#if defined(V8_TARGET_BIG_ENDIAN)
  if (is_load_mem) {
    pinned.set(src_op.rm());
    liftoff::ChangeEndiannessLoad(this, dst, type, pinned);
  }
#endif
}

void LiftoffAssembler::Store(Register dst_addr, Register offset_reg,
                             uint32_t offset_imm, LiftoffRegister src,
                             StoreType type, LiftoffRegList pinned,
                             uint32_t* protected_store_pc, bool is_store_mem) {
  Register dst = no_reg;
  MemOperand dst_op = MemOperand(dst_addr, offset_imm);
  if (offset_reg != no_reg) {
    if (is_store_mem) {
      pinned.set(src);
    }
    dst = GetUnusedRegister(kGpReg, pinned).gp();
    emit_ptrsize_add(dst, dst_addr, offset_reg);
    dst_op = MemOperand(dst, offset_imm);
  }

#if defined(V8_TARGET_BIG_ENDIAN)
  if (is_store_mem) {
    pinned.set(dst_op.rm());
    LiftoffRegister tmp = GetUnusedRegister(src.reg_class(), pinned);
    // Save the original value, since the byte swap must not clobber {src}.
    Move(tmp, src, type.value_type());

    src = tmp;
    pinned.set(tmp);
    liftoff::ChangeEndiannessStore(this, src, type, pinned);
  }
#endif

  if (protected_store_pc) *protected_store_pc = pc_offset();
  switch (type.value()) {
    case StoreType::kI32Store8:
    case StoreType::kI64Store8:
      sb(src.gp(), dst_op);
      break;
    case StoreType::kI32Store16:
    case StoreType::kI64Store16:
      TurboAssembler::Ush(src.gp(), dst_op, t8);
      break;
    case StoreType::kI32Store:
    case StoreType::kI64Store32:
      TurboAssembler::Usw(src.gp(), dst_op);
      break;
    case StoreType::kI64Store:
      TurboAssembler::Usd(src.gp(), dst_op);
      break;
    case StoreType::kF32Store:
      TurboAssembler::Uswc1(src.fp(), dst_op, t8);
      break;
    case StoreType::kF64Store:
      TurboAssembler::Usdc1(src.fp(), dst_op, t8);
      break;
    default:
      UNREACHABLE();
  }
}

void LiftoffAssembler::LoadCallerFrameSlot(LiftoffRegister dst,
                                           uint32_t caller_slot_idx,
                                           ValueType type) {
  MemOperand src(fp, kPointerSize * (caller_slot_idx + 1));
  liftoff::Load(this, dst, src, type);
}

void LiftoffAssembler::MoveStackValue(uint32_t dst_index, uint32_t src_index,
                                      ValueType type) {
  DCHECK_NE(dst_index, src_index);
  LiftoffRegister reg = GetUnusedRegister(reg_class_for(type));
  Fill(reg, src_index, type);
  Spill(dst_index, reg, type);
}

void LiftoffAssembler::Move(Register dst, Register src, ValueType type) {
  DCHECK_NE(dst, src);
  TurboAssembler::Move(dst, src);
}

void LiftoffAssembler::Move(DoubleRegister dst, DoubleRegister src,
                            ValueType type) {
  DCHECK_NE(dst, src);
  TurboAssembler::Move(dst, src);
}

void LiftoffAssembler::Spill(uint32_t index, LiftoffRegister reg,
                             ValueType type) {
  RecordUsedSpillSlot(index);
  MemOperand dst = liftoff::GetStackSlot(index);
  switch (type) {
    case kWasmI32:
      sw(reg.gp(), dst);
      break;
    case kWasmI64:
      sd(reg.gp(), dst);
      break;
    case kWasmF32:
      swc1(reg.fp(), dst);
      break;
    case kWasmF64:
      TurboAssembler::Sdc1(reg.fp(), dst);
      break;
    default:
      UNREACHABLE();
  }
}

void LiftoffAssembler::Spill(uint32_t index, WasmValue value) {
  RecordUsedSpillSlot(index);
  MemOperand dst = liftoff::GetStackSlot(index);
  switch (value.type()) {
    case kWasmI32: {
      LiftoffRegister tmp = GetUnusedRegister(kGpReg);
      TurboAssembler::li(tmp.gp(), Operand(value.to_i32()));
      sw(tmp.gp(), dst);
      break;
    }
    case kWasmI64: {
      LiftoffRegister tmp = GetUnusedRegister(kGpReg);
      TurboAssembler::li(tmp.gp(), value.to_i64());
      sd(tmp.gp(), dst);
      break;
    }
    default:
      // kWasmF32 and kWasmF64 are unreachable, since those constants are not
      // tracked.
      UNREACHABLE();
  }
}

void LiftoffAssembler::Fill(LiftoffRegister reg, uint32_t index,
                            ValueType type) {
  MemOperand src = liftoff::GetStackSlot(index);
  switch (type) {
    case kWasmI32:
      lw(reg.gp(), src);
      break;
    case kWasmI64:
      ld(reg.gp(), src);
      break;
    case kWasmF32:
      lwc1(reg.fp(), src);
      break;
    case kWasmF64:
      TurboAssembler::Ldc1(reg.fp(), src);
      break;
    default:
      UNREACHABLE();
  }
}

void LiftoffAssembler::FillI64Half(Register, uint32_t half_index) {
  UNREACHABLE();
}
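
// Integer division and remainder must check for a zero divisor explicitly:
// the MIPS div/mod instructions do not trap, and signed division of kMinInt
// by -1 is additionally unrepresentable.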
void LiftoffAssembler::emit_i32_mul(Register dst, Register lhs, Register rhs) {
  TurboAssembler::Mul(dst, lhs, rhs);
}

void LiftoffAssembler::emit_i32_divs(Register dst, Register lhs, Register rhs,
                                     Label* trap_div_by_zero,
                                     Label* trap_div_unrepresentable) {
  TurboAssembler::Branch(trap_div_by_zero, eq, rhs, Operand(zero_reg));

  // Check if lhs == kMinInt and rhs == -1, since this case is unrepresentable.
  TurboAssembler::li(kScratchReg, 1);
  TurboAssembler::li(kScratchReg2, 1);
  TurboAssembler::LoadZeroOnCondition(kScratchReg, lhs, Operand(kMinInt), eq);
  TurboAssembler::LoadZeroOnCondition(kScratchReg2, rhs, Operand(-1), eq);
  daddu(kScratchReg, kScratchReg, kScratchReg2);
  TurboAssembler::Branch(trap_div_unrepresentable, eq, kScratchReg,
                         Operand(zero_reg));

  TurboAssembler::Div(dst, lhs, rhs);
}

void LiftoffAssembler::emit_i32_divu(Register dst, Register lhs, Register rhs,
                                     Label* trap_div_by_zero) {
  TurboAssembler::Branch(trap_div_by_zero, eq, rhs, Operand(zero_reg));
  TurboAssembler::Divu(dst, lhs, rhs);
}

void LiftoffAssembler::emit_i32_rems(Register dst, Register lhs, Register rhs,
                                     Label* trap_div_by_zero) {
  TurboAssembler::Branch(trap_div_by_zero, eq, rhs, Operand(zero_reg));
  TurboAssembler::Mod(dst, lhs, rhs);
}

void LiftoffAssembler::emit_i32_remu(Register dst, Register lhs, Register rhs,
                                     Label* trap_div_by_zero) {
  TurboAssembler::Branch(trap_div_by_zero, eq, rhs, Operand(zero_reg));
  TurboAssembler::Modu(dst, lhs, rhs);
}

#define I32_BINOP(name, instruction)                                 \
  void LiftoffAssembler::emit_i32_##name(Register dst, Register lhs, \
                                         Register rhs) {             \
    instruction(dst, lhs, rhs);                                      \
  }

// clang-format off
I32_BINOP(add, addu)
I32_BINOP(sub, subu)
I32_BINOP(and, and_)
I32_BINOP(or, or_)
I32_BINOP(xor, xor_)
// clang-format on

#undef I32_BINOP

bool LiftoffAssembler::emit_i32_clz(Register dst, Register src) {
  TurboAssembler::Clz(dst, src);
  return true;
}

bool LiftoffAssembler::emit_i32_ctz(Register dst, Register src) {
  TurboAssembler::Ctz(dst, src);
  return true;
}

bool LiftoffAssembler::emit_i32_popcnt(Register dst, Register src) {
  TurboAssembler::Popcnt(dst, src);
  return true;
}

#define I32_SHIFTOP(name, instruction)                                      \
  void LiftoffAssembler::emit_i32_##name(                                   \
      Register dst, Register src, Register amount, LiftoffRegList pinned) { \
    instruction(dst, src, amount);                                          \
  }
#define I32_SHIFTOP_I(name, instruction)                             \
  I32_SHIFTOP(name, instruction##v)                                  \
  void LiftoffAssembler::emit_i32_##name(Register dst, Register src, \
                                         int amount) {               \
    DCHECK(is_uint5(amount));                                        \
    instruction(dst, src, amount);                                   \
  }

I32_SHIFTOP(shl, sllv)
I32_SHIFTOP(sar, srav)
I32_SHIFTOP_I(shr, srl)

#undef I32_SHIFTOP
#undef I32_SHIFTOP_I

void LiftoffAssembler::emit_i64_mul(LiftoffRegister dst, LiftoffRegister lhs,
                                    LiftoffRegister rhs) {
  TurboAssembler::Dmul(dst.gp(), lhs.gp(), rhs.gp());
}

bool LiftoffAssembler::emit_i64_divs(LiftoffRegister dst, LiftoffRegister lhs,
                                     LiftoffRegister rhs,
                                     Label* trap_div_by_zero,
                                     Label* trap_div_unrepresentable) {
  TurboAssembler::Branch(trap_div_by_zero, eq, rhs.gp(), Operand(zero_reg));

  // Check if lhs == MinInt64 and rhs == -1, since this case is
  // unrepresentable.
  TurboAssembler::li(kScratchReg, 1);
  TurboAssembler::li(kScratchReg2, 1);
  TurboAssembler::LoadZeroOnCondition(
      kScratchReg, lhs.gp(), Operand(std::numeric_limits<int64_t>::min()), eq);
  TurboAssembler::LoadZeroOnCondition(kScratchReg2, rhs.gp(), Operand(-1), eq);
  daddu(kScratchReg, kScratchReg, kScratchReg2);
  TurboAssembler::Branch(trap_div_unrepresentable, eq, kScratchReg,
                         Operand(zero_reg));

  TurboAssembler::Ddiv(dst.gp(), lhs.gp(), rhs.gp());
  return true;
}

bool LiftoffAssembler::emit_i64_divu(LiftoffRegister dst, LiftoffRegister lhs,
                                     LiftoffRegister rhs,
                                     Label* trap_div_by_zero) {
  TurboAssembler::Branch(trap_div_by_zero, eq, rhs.gp(), Operand(zero_reg));
  TurboAssembler::Ddivu(dst.gp(), lhs.gp(), rhs.gp());
  return true;
}

bool LiftoffAssembler::emit_i64_rems(LiftoffRegister dst, LiftoffRegister lhs,
                                     LiftoffRegister rhs,
                                     Label* trap_div_by_zero) {
  TurboAssembler::Branch(trap_div_by_zero, eq, rhs.gp(), Operand(zero_reg));
  TurboAssembler::Dmod(dst.gp(), lhs.gp(), rhs.gp());
  return true;
}

bool LiftoffAssembler::emit_i64_remu(LiftoffRegister dst, LiftoffRegister lhs,
                                     LiftoffRegister rhs,
                                     Label* trap_div_by_zero) {
  TurboAssembler::Branch(trap_div_by_zero, eq, rhs.gp(), Operand(zero_reg));
  TurboAssembler::Dmodu(dst.gp(), lhs.gp(), rhs.gp());
  return true;
}

#define I64_BINOP(name, instruction)                                   \
  void LiftoffAssembler::emit_i64_##name(                              \
      LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs) { \
    instruction(dst.gp(), lhs.gp(), rhs.gp());                         \
  }

// clang-format off
I64_BINOP(add, daddu)
I64_BINOP(sub, dsubu)
I64_BINOP(and, and_)
I64_BINOP(or, or_)
I64_BINOP(xor, xor_)
// clang-format on

#undef I64_BINOP

#define I64_SHIFTOP(name, instruction)                                        \
  void LiftoffAssembler::emit_i64_##name(LiftoffRegister dst,                 \
                                         LiftoffRegister src, Register amount,\
                                         LiftoffRegList pinned) {             \
    instruction(dst.gp(), src.gp(), amount);                                  \
  }
#define I64_SHIFTOP_I(name, instruction)                                    \
  I64_SHIFTOP(name, instruction##v)                                         \
  void LiftoffAssembler::emit_i64_##name(LiftoffRegister dst,               \
                                         LiftoffRegister src, int amount) { \
    DCHECK(is_uint6(amount));                                                \
    instruction(dst.gp(), src.gp(), amount);                                 \
  }

I64_SHIFTOP(shl, dsllv)
I64_SHIFTOP(sar, dsrav)
I64_SHIFTOP_I(shr, dsrl)

#undef I64_SHIFTOP
#undef I64_SHIFTOP_I

void LiftoffAssembler::emit_i32_to_intptr(Register dst, Register src) {
  addu(dst, src, zero_reg);
}

void LiftoffAssembler::emit_f32_neg(DoubleRegister dst, DoubleRegister src) {
  TurboAssembler::Neg_s(dst, src);
}

void LiftoffAssembler::emit_f64_neg(DoubleRegister dst, DoubleRegister src) {
  TurboAssembler::Neg_d(dst, src);
}
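
// min/max handle the common case inline; the TurboAssembler helpers branch to
// an out-of-line path for NaN and signed-zero inputs.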
void LiftoffAssembler::emit_f32_min(DoubleRegister dst, DoubleRegister lhs,
                                    DoubleRegister rhs) {
  Label ool, done;
  TurboAssembler::Float32Min(dst, lhs, rhs, &ool);
  Branch(&done);

  bind(&ool);
  TurboAssembler::Float32MinOutOfLine(dst, lhs, rhs);
  bind(&done);
}

void LiftoffAssembler::emit_f32_max(DoubleRegister dst, DoubleRegister lhs,
                                    DoubleRegister rhs) {
  Label ool, done;
  TurboAssembler::Float32Max(dst, lhs, rhs, &ool);
  Branch(&done);

  bind(&ool);
  TurboAssembler::Float32MaxOutOfLine(dst, lhs, rhs);
  bind(&done);
}

void LiftoffAssembler::emit_f32_copysign(DoubleRegister dst, DoubleRegister lhs,
                                         DoubleRegister rhs) {
  BAILOUT("f32_copysign");
}

void LiftoffAssembler::emit_f64_min(DoubleRegister dst, DoubleRegister lhs,
                                    DoubleRegister rhs) {
  Label ool, done;
  TurboAssembler::Float64Min(dst, lhs, rhs, &ool);
  Branch(&done);

  bind(&ool);
  TurboAssembler::Float64MinOutOfLine(dst, lhs, rhs);
  bind(&done);
}

void LiftoffAssembler::emit_f64_max(DoubleRegister dst, DoubleRegister lhs,
                                    DoubleRegister rhs) {
  Label ool, done;
  TurboAssembler::Float64Max(dst, lhs, rhs, &ool);
  Branch(&done);

  bind(&ool);
  TurboAssembler::Float64MaxOutOfLine(dst, lhs, rhs);
  bind(&done);
}

void LiftoffAssembler::emit_f64_copysign(DoubleRegister dst, DoubleRegister lhs,
                                         DoubleRegister rhs) {
  BAILOUT("f64_copysign");
}

#define FP_BINOP(name, instruction)                                          \
  void LiftoffAssembler::emit_##name(DoubleRegister dst, DoubleRegister lhs, \
                                     DoubleRegister rhs) {                   \
    instruction(dst, lhs, rhs);                                              \
  }
#define FP_UNOP(name, instruction)                                            \
  void LiftoffAssembler::emit_##name(DoubleRegister dst, DoubleRegister src) {\
    instruction(dst, src);                                                    \
  }
#define FP_UNOP_RETURN_TRUE(name, instruction)                                \
  bool LiftoffAssembler::emit_##name(DoubleRegister dst, DoubleRegister src) {\
    instruction(dst, src);                                                    \
    return true;                                                              \
  }

FP_BINOP(f32_add, add_s)
FP_BINOP(f32_sub, sub_s)
FP_BINOP(f32_mul, mul_s)
FP_BINOP(f32_div, div_s)
FP_UNOP(f32_abs, abs_s)
FP_UNOP_RETURN_TRUE(f32_ceil, Ceil_s_s)
FP_UNOP_RETURN_TRUE(f32_floor, Floor_s_s)
FP_UNOP_RETURN_TRUE(f32_trunc, Trunc_s_s)
FP_UNOP_RETURN_TRUE(f32_nearest_int, Round_s_s)
FP_UNOP(f32_sqrt, sqrt_s)
FP_BINOP(f64_add, add_d)
FP_BINOP(f64_sub, sub_d)
FP_BINOP(f64_mul, mul_d)
FP_BINOP(f64_div, div_d)
FP_UNOP(f64_abs, abs_d)
FP_UNOP_RETURN_TRUE(f64_ceil, Ceil_d_d)
FP_UNOP_RETURN_TRUE(f64_floor, Floor_d_d)
FP_UNOP_RETURN_TRUE(f64_trunc, Trunc_d_d)
FP_UNOP_RETURN_TRUE(f64_nearest_int, Round_d_d)
FP_UNOP(f64_sqrt, sqrt_d)

#undef FP_BINOP
#undef FP_UNOP
#undef FP_UNOP_RETURN_TRUE

bool LiftoffAssembler::emit_type_conversion(WasmOpcode opcode,
                                            LiftoffRegister dst,
                                            LiftoffRegister src, Label* trap) {
  switch (opcode) {
    case kExprI32ConvertI64:
      TurboAssembler::Ext(dst.gp(), src.gp(), 0, 32);
      return true;
    case kExprI32SConvertF32: {
      LiftoffRegister rounded =
          GetUnusedRegister(kFpReg, LiftoffRegList::ForRegs(src));
      LiftoffRegister converted_back =
          GetUnusedRegister(kFpReg, LiftoffRegList::ForRegs(src, rounded));

      // Real conversion.
      TurboAssembler::Trunc_s_s(rounded.fp(), src.fp());
      trunc_w_s(kScratchDoubleReg, rounded.fp());
      mfc1(dst.gp(), kScratchDoubleReg);
      // Avoid INT32_MAX as an overflow indicator and use INT32_MIN instead,
      // because INT32_MIN allows easier out-of-bounds detection.
      TurboAssembler::Addu(kScratchReg, dst.gp(), 1);
      TurboAssembler::Slt(kScratchReg2, kScratchReg, dst.gp());
      TurboAssembler::Movn(dst.gp(), kScratchReg, kScratchReg2);

      // Checking if trap.
      mtc1(dst.gp(), kScratchDoubleReg);
      cvt_s_w(converted_back.fp(), kScratchDoubleReg);
      TurboAssembler::CompareF32(EQ, rounded.fp(), converted_back.fp());
      TurboAssembler::BranchFalseF(trap);
      return true;
    }
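    // The remaining float-to-int conversions follow the same pattern:
    // truncate, convert the result back, and trap unless the round trip is
    // exact.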
    case kExprI32UConvertF32: {
      LiftoffRegister rounded =
          GetUnusedRegister(kFpReg, LiftoffRegList::ForRegs(src));
      LiftoffRegister converted_back =
          GetUnusedRegister(kFpReg, LiftoffRegList::ForRegs(src, rounded));

      // Real conversion.
      TurboAssembler::Trunc_s_s(rounded.fp(), src.fp());
      TurboAssembler::Trunc_uw_s(dst.gp(), rounded.fp(), kScratchDoubleReg);
      // Avoid UINT32_MAX as an overflow indicator and use 0 instead,
      // because 0 allows easier out-of-bounds detection.
      TurboAssembler::Addu(kScratchReg, dst.gp(), 1);
      TurboAssembler::Movz(dst.gp(), zero_reg, kScratchReg);

      // Checking if trap.
      TurboAssembler::Cvt_d_uw(converted_back.fp(), dst.gp());
      cvt_s_d(converted_back.fp(), converted_back.fp());
      TurboAssembler::CompareF32(EQ, rounded.fp(), converted_back.fp());
      TurboAssembler::BranchFalseF(trap);
      return true;
    }
    case kExprI32SConvertF64: {
      LiftoffRegister rounded =
          GetUnusedRegister(kFpReg, LiftoffRegList::ForRegs(src));
      LiftoffRegister converted_back =
          GetUnusedRegister(kFpReg, LiftoffRegList::ForRegs(src, rounded));

      // Real conversion.
      TurboAssembler::Trunc_d_d(rounded.fp(), src.fp());
      trunc_w_d(kScratchDoubleReg, rounded.fp());
      mfc1(dst.gp(), kScratchDoubleReg);

      // Checking if trap.
      cvt_d_w(converted_back.fp(), kScratchDoubleReg);
      TurboAssembler::CompareF64(EQ, rounded.fp(), converted_back.fp());
      TurboAssembler::BranchFalseF(trap);
      return true;
    }
    case kExprI32UConvertF64: {
      LiftoffRegister rounded =
          GetUnusedRegister(kFpReg, LiftoffRegList::ForRegs(src));
      LiftoffRegister converted_back =
          GetUnusedRegister(kFpReg, LiftoffRegList::ForRegs(src, rounded));

      // Real conversion.
      TurboAssembler::Trunc_d_d(rounded.fp(), src.fp());
      TurboAssembler::Trunc_uw_d(dst.gp(), rounded.fp(), kScratchDoubleReg);

      // Checking if trap.
      TurboAssembler::Cvt_d_uw(converted_back.fp(), dst.gp());
      TurboAssembler::CompareF64(EQ, rounded.fp(), converted_back.fp());
      TurboAssembler::BranchFalseF(trap);
      return true;
    }
    case kExprI32ReinterpretF32:
      TurboAssembler::FmoveLow(dst.gp(), src.fp());
      return true;
    case kExprI64SConvertI32:
      sll(dst.gp(), src.gp(), 0);
      return true;
    case kExprI64UConvertI32:
      TurboAssembler::Dext(dst.gp(), src.gp(), 0, 32);
      return true;
    case kExprI64SConvertF32: {
      LiftoffRegister rounded =
          GetUnusedRegister(kFpReg, LiftoffRegList::ForRegs(src));
      LiftoffRegister converted_back =
          GetUnusedRegister(kFpReg, LiftoffRegList::ForRegs(src, rounded));

      // Real conversion.
      TurboAssembler::Trunc_s_s(rounded.fp(), src.fp());
      trunc_l_s(kScratchDoubleReg, rounded.fp());
      dmfc1(dst.gp(), kScratchDoubleReg);
      // Avoid INT64_MAX as an overflow indicator and use INT64_MIN instead,
      // because INT64_MIN allows easier out-of-bounds detection.
      TurboAssembler::Daddu(kScratchReg, dst.gp(), 1);
      TurboAssembler::Slt(kScratchReg2, kScratchReg, dst.gp());
      TurboAssembler::Movn(dst.gp(), kScratchReg, kScratchReg2);

      // Checking if trap.
      dmtc1(dst.gp(), kScratchDoubleReg);
      cvt_s_l(converted_back.fp(), kScratchDoubleReg);
      TurboAssembler::CompareF32(EQ, rounded.fp(), converted_back.fp());
      TurboAssembler::BranchFalseF(trap);
      return true;
    }
    case kExprI64UConvertF32: {
      // Real conversion.
      TurboAssembler::Trunc_ul_s(dst.gp(), src.fp(), kScratchDoubleReg,
                                 kScratchReg);

      // Checking if trap.
      TurboAssembler::Branch(trap, eq, kScratchReg, Operand(zero_reg));
      return true;
    }
    case kExprI64SConvertF64: {
      LiftoffRegister rounded =
          GetUnusedRegister(kFpReg, LiftoffRegList::ForRegs(src));
      LiftoffRegister converted_back =
          GetUnusedRegister(kFpReg, LiftoffRegList::ForRegs(src, rounded));

      // Real conversion.
      TurboAssembler::Trunc_d_d(rounded.fp(), src.fp());
      trunc_l_d(kScratchDoubleReg, rounded.fp());
      dmfc1(dst.gp(), kScratchDoubleReg);
      // Avoid INT64_MAX as an overflow indicator and use INT64_MIN instead,
      // because INT64_MIN allows easier out-of-bounds detection.
      TurboAssembler::Daddu(kScratchReg, dst.gp(), 1);
      TurboAssembler::Slt(kScratchReg2, kScratchReg, dst.gp());
      TurboAssembler::Movn(dst.gp(), kScratchReg, kScratchReg2);

      // Checking if trap.
      dmtc1(dst.gp(), kScratchDoubleReg);
      cvt_d_l(converted_back.fp(), kScratchDoubleReg);
      TurboAssembler::CompareF64(EQ, rounded.fp(), converted_back.fp());
      TurboAssembler::BranchFalseF(trap);
      return true;
    }
    case kExprI64UConvertF64: {
      // Real conversion.
      TurboAssembler::Trunc_ul_d(dst.gp(), src.fp(), kScratchDoubleReg,
                                 kScratchReg);

      // Checking if trap.
      TurboAssembler::Branch(trap, eq, kScratchReg, Operand(zero_reg));
      return true;
    }
    case kExprI64ReinterpretF64:
      dmfc1(dst.gp(), src.fp());
      return true;
    case kExprF32SConvertI32: {
      LiftoffRegister scratch =
          GetUnusedRegister(kFpReg, LiftoffRegList::ForRegs(dst));
      mtc1(src.gp(), scratch.fp());
      cvt_s_w(dst.fp(), scratch.fp());
      return true;
    }
    case kExprF32UConvertI32:
      TurboAssembler::Cvt_s_uw(dst.fp(), src.gp());
      return true;
    case kExprF32ConvertF64:
      cvt_s_d(dst.fp(), src.fp());
      return true;
    case kExprF32ReinterpretI32:
      TurboAssembler::FmoveLow(dst.fp(), src.gp());
      return true;
    case kExprF64SConvertI32: {
      LiftoffRegister scratch =
          GetUnusedRegister(kFpReg, LiftoffRegList::ForRegs(dst));
      mtc1(src.gp(), scratch.fp());
      cvt_d_w(dst.fp(), scratch.fp());
      return true;
    }
    case kExprF64UConvertI32:
      TurboAssembler::Cvt_d_uw(dst.fp(), src.gp());
      return true;
    case kExprF64ConvertF32:
      cvt_d_s(dst.fp(), src.fp());
      return true;
    case kExprF64ReinterpretI64:
      dmtc1(src.gp(), dst.fp());
      return true;
    default:
      return false;
  }
}

void LiftoffAssembler::emit_i32_signextend_i8(Register dst, Register src) {
  BAILOUT("emit_i32_signextend_i8");
}

void LiftoffAssembler::emit_i32_signextend_i16(Register dst, Register src) {
  BAILOUT("emit_i32_signextend_i16");
}

void LiftoffAssembler::emit_i64_signextend_i8(LiftoffRegister dst,
                                              LiftoffRegister src) {
  BAILOUT("emit_i64_signextend_i8");
}

void LiftoffAssembler::emit_i64_signextend_i16(LiftoffRegister dst,
                                               LiftoffRegister src) {
  BAILOUT("emit_i64_signextend_i16");
}

void LiftoffAssembler::emit_i64_signextend_i32(LiftoffRegister dst,
                                               LiftoffRegister src) {
  BAILOUT("emit_i64_signextend_i32");
}

void LiftoffAssembler::emit_jump(Label* label) {
  TurboAssembler::Branch(label);
}

void LiftoffAssembler::emit_jump(Register target) {
  TurboAssembler::Jump(target);
}

void LiftoffAssembler::emit_cond_jump(Condition cond, Label* label,
                                      ValueType type, Register lhs,
                                      Register rhs) {
  if (rhs != no_reg) {
    TurboAssembler::Branch(label, cond, lhs, Operand(rhs));
  } else {
    TurboAssembler::Branch(label, cond, lhs, Operand(zero_reg));
  }
}

void LiftoffAssembler::emit_i32_eqz(Register dst, Register src) {
  sltiu(dst, src, 1);
}

void LiftoffAssembler::emit_i32_set_cond(Condition cond, Register dst,
                                         Register lhs, Register rhs) {
  Register tmp = dst;
  if (dst == lhs || dst == rhs) {
    tmp = GetUnusedRegister(kGpReg, LiftoffRegList::ForRegs(lhs, rhs)).gp();
  }
  // Write 1 as result.
  TurboAssembler::li(tmp, 1);

  // If the negated condition is true, write 0 as result.
  Condition neg_cond = NegateCondition(cond);
  TurboAssembler::LoadZeroOnCondition(tmp, lhs, Operand(rhs), neg_cond);

  // If tmp != dst, move the result into place.
  TurboAssembler::Move(dst, tmp);
}

void LiftoffAssembler::emit_i64_eqz(Register dst, LiftoffRegister src) {
  sltiu(dst, src.gp(), 1);
}

void LiftoffAssembler::emit_i64_set_cond(Condition cond, Register dst,
                                         LiftoffRegister lhs,
                                         LiftoffRegister rhs) {
  Register tmp = dst;
  if (dst == lhs.gp() || dst == rhs.gp()) {
    tmp = GetUnusedRegister(kGpReg, LiftoffRegList::ForRegs(lhs, rhs)).gp();
  }
  // Write 1 as result.
  TurboAssembler::li(tmp, 1);

  // If the negated condition is true, write 0 as result.
  Condition neg_cond = NegateCondition(cond);
  TurboAssembler::LoadZeroOnCondition(tmp, lhs.gp(), Operand(rhs.gp()),
                                      neg_cond);

  // If tmp != dst, move the result into place.
  TurboAssembler::Move(dst, tmp);
}
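
// Maps a Liftoff condition onto an FPU compare condition plus a predicate bit
// telling the float set_cond emitters below whether the FPU flag must be set
// or cleared.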
namespace liftoff {

inline FPUCondition ConditionToConditionCmpFPU(bool& predicate,
                                               Condition condition) {
  switch (condition) {
    case kEqual:
      predicate = true;
      return EQ;
    case kUnequal:
      predicate = false;
      return EQ;
    case kUnsignedLessThan:
      predicate = true;
      return OLT;
    case kUnsignedGreaterEqual:
      predicate = false;
      return OLT;
    case kUnsignedLessEqual:
      predicate = true;
      return OLE;
    case kUnsignedGreaterThan:
      predicate = false;
      return OLE;
    default:
      predicate = true;
      break;
  }
  UNREACHABLE();
}

}  // namespace liftoff

void LiftoffAssembler::emit_f32_set_cond(Condition cond, Register dst,
                                         DoubleRegister lhs,
                                         DoubleRegister rhs) {
  Label not_nan, cont;
  TurboAssembler::CompareIsNanF32(lhs, rhs);
  TurboAssembler::BranchFalseF(&not_nan);
  // If one of the operands is NaN, return 1 for f32.ne, else 0.
  if (cond == ne) {
    TurboAssembler::li(dst, 1);
  } else {
    TurboAssembler::Move(dst, zero_reg);
  }
  TurboAssembler::Branch(&cont);

  bind(&not_nan);

  TurboAssembler::li(dst, 1);
  bool predicate;
  FPUCondition fcond = liftoff::ConditionToConditionCmpFPU(predicate, cond);
  TurboAssembler::CompareF32(fcond, lhs, rhs);
  if (predicate) {
    TurboAssembler::LoadZeroIfNotFPUCondition(dst);
  } else {
    TurboAssembler::LoadZeroIfFPUCondition(dst);
  }

  bind(&cont);
}

void LiftoffAssembler::emit_f64_set_cond(Condition cond, Register dst,
                                         DoubleRegister lhs,
                                         DoubleRegister rhs) {
  Label not_nan, cont;
  TurboAssembler::CompareIsNanF64(lhs, rhs);
  TurboAssembler::BranchFalseF(&not_nan);
  // If one of the operands is NaN, return 1 for f64.ne, else 0.
  if (cond == ne) {
    TurboAssembler::li(dst, 1);
  } else {
    TurboAssembler::Move(dst, zero_reg);
  }
  TurboAssembler::Branch(&cont);

  bind(&not_nan);

  TurboAssembler::li(dst, 1);
  bool predicate;
  FPUCondition fcond = liftoff::ConditionToConditionCmpFPU(predicate, cond);
  TurboAssembler::CompareF64(fcond, lhs, rhs);
  if (predicate) {
    TurboAssembler::LoadZeroIfNotFPUCondition(dst);
  } else {
    TurboAssembler::LoadZeroIfFPUCondition(dst);
  }

  bind(&cont);
}

void LiftoffAssembler::StackCheck(Label* ool_code, Register limit_address) {
  TurboAssembler::Uld(limit_address, MemOperand(limit_address));
  TurboAssembler::Branch(ool_code, ule, sp, Operand(limit_address));
}

void LiftoffAssembler::CallTrapCallbackForTesting() {
  PrepareCallCFunction(0, GetUnusedRegister(kGpReg).gp());
  CallCFunction(ExternalReference::wasm_call_trap_callback_for_testing(), 0);
}

void LiftoffAssembler::AssertUnreachable(AbortReason reason) {
  if (emit_debug_code()) Abort(reason);
}

void LiftoffAssembler::PushRegisters(LiftoffRegList regs) {
  LiftoffRegList gp_regs = regs & kGpCacheRegList;
  unsigned num_gp_regs = gp_regs.GetNumRegsSet();
  if (num_gp_regs) {
    unsigned offset = num_gp_regs * kPointerSize;
    daddiu(sp, sp, -offset);
    while (!gp_regs.is_empty()) {
      LiftoffRegister reg = gp_regs.GetFirstRegSet();
      offset -= kPointerSize;
      sd(reg.gp(), MemOperand(sp, offset));
      gp_regs.clear(reg);
    }
    DCHECK_EQ(offset, 0);
  }
  LiftoffRegList fp_regs = regs & kFpCacheRegList;
  unsigned num_fp_regs = fp_regs.GetNumRegsSet();
  if (num_fp_regs) {
    daddiu(sp, sp, -(num_fp_regs * kStackSlotSize));
    unsigned offset = 0;
    while (!fp_regs.is_empty()) {
      LiftoffRegister reg = fp_regs.GetFirstRegSet();
      TurboAssembler::Sdc1(reg.fp(), MemOperand(sp, offset));
      fp_regs.clear(reg);
      offset += sizeof(double);
    }
    DCHECK_EQ(offset, num_fp_regs * sizeof(double));
  }
}
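
// Restores registers in the exact reverse order of PushRegisters: fp
// registers first (they were pushed last), then gp registers.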
void LiftoffAssembler::PopRegisters(LiftoffRegList regs) {
  LiftoffRegList fp_regs = regs & kFpCacheRegList;
  unsigned fp_offset = 0;
  while (!fp_regs.is_empty()) {
    LiftoffRegister reg = fp_regs.GetFirstRegSet();
    TurboAssembler::Ldc1(reg.fp(), MemOperand(sp, fp_offset));
    fp_regs.clear(reg);
    fp_offset += sizeof(double);
  }
  if (fp_offset) daddiu(sp, sp, fp_offset);
  LiftoffRegList gp_regs = regs & kGpCacheRegList;
  unsigned gp_offset = 0;
  while (!gp_regs.is_empty()) {
    LiftoffRegister reg = gp_regs.GetLastRegSet();
    ld(reg.gp(), MemOperand(sp, gp_offset));
    gp_regs.clear(reg);
    gp_offset += kPointerSize;
  }
  daddiu(sp, sp, gp_offset);
}

void LiftoffAssembler::DropStackSlotsAndRet(uint32_t num_stack_slots) {
  DCHECK_LT(num_stack_slots, (1 << 16) / kPointerSize);
  TurboAssembler::DropAndRet(static_cast<int>(num_stack_slots));
}
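
// C calls spill all arguments to a stack buffer, pass the buffer address as
// the single C argument in {a0}, and read results back from {v0} and/or the
// buffer afterwards.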
void LiftoffAssembler::CallC(wasm::FunctionSig* sig,
                             const LiftoffRegister* args,
                             const LiftoffRegister* rets,
                             ValueType out_argument_type, int stack_bytes,
                             ExternalReference ext_ref) {
  daddiu(sp, sp, -stack_bytes);

  int arg_bytes = 0;
  for (ValueType param_type : sig->parameters()) {
    liftoff::Store(this, sp, arg_bytes, *args++, param_type);
    arg_bytes += ValueTypes::MemSize(param_type);
  }
  DCHECK_LE(arg_bytes, stack_bytes);

  // Pass a pointer to the buffer with the arguments to the C function.
  constexpr Register kFirstArgReg = a0;
  mov(kFirstArgReg, sp);

  // Now call the C function.
  constexpr int kNumCCallArgs = 1;
  PrepareCallCFunction(kNumCCallArgs, kScratchReg);
  CallCFunction(ext_ref, kNumCCallArgs);

  // Move the return value to the right register.
  const LiftoffRegister* next_result_reg = rets;
  if (sig->return_count() > 0) {
    DCHECK_EQ(1, sig->return_count());
    constexpr Register kReturnReg = v0;
    if (kReturnReg != next_result_reg->gp()) {
      Move(*next_result_reg, LiftoffRegister(kReturnReg), sig->GetReturn(0));
    }
    ++next_result_reg;
  }

  // Load the potential output value from the buffer on the stack.
  if (out_argument_type != kWasmStmt) {
    liftoff::Load(this, *next_result_reg, MemOperand(sp, 0), out_argument_type);
  }

  daddiu(sp, sp, stack_bytes);
}

void LiftoffAssembler::CallNativeWasmCode(Address addr) {
  Call(addr, RelocInfo::WASM_CALL);
}

void LiftoffAssembler::CallIndirect(wasm::FunctionSig* sig,
                                    compiler::CallDescriptor* call_descriptor,
                                    Register target) {
  if (target == no_reg) {
    pop(kScratchReg);
    Call(kScratchReg);
  } else {
    Call(target);
  }
}

void LiftoffAssembler::CallRuntimeStub(WasmCode::RuntimeStubId sid) {
  // A direct call to a wasm runtime stub defined in this module. Just encode
  // the stub index; this will be patched at relocation.
  Call(static_cast<Address>(sid), RelocInfo::WASM_STUB_CALL);
}

void LiftoffAssembler::AllocateStackSlot(Register addr, uint32_t size) {
  daddiu(sp, sp, -size);
  TurboAssembler::Move(addr, sp);
}

void LiftoffAssembler::DeallocateStackSlot(uint32_t size) {
  daddiu(sp, sp, size);
}

void LiftoffStackSlots::Construct() {
  for (auto& slot : slots_) {
    const LiftoffAssembler::VarState& src = slot.src_;
    switch (src.loc()) {
      case LiftoffAssembler::VarState::kStack:
        asm_->ld(kScratchReg, liftoff::GetStackSlot(slot.src_index_));
        asm_->push(kScratchReg);
        break;
      case LiftoffAssembler::VarState::kRegister:
        liftoff::push(asm_, src.reg(), src.type());
        break;
      case LiftoffAssembler::VarState::KIntConst: {
        asm_->li(kScratchReg, Operand(src.i32_const()));
        asm_->push(kScratchReg);
        break;
      }
    }
  }
}

}  // namespace wasm
}  // namespace internal
}  // namespace v8

#undef BAILOUT

#endif  // V8_WASM_BASELINE_MIPS64_LIFTOFF_ASSEMBLER_MIPS64_H_