#ifndef V8_WASM_BASELINE_MIPS_LIFTOFF_ASSEMBLER_MIPS_H_
#define V8_WASM_BASELINE_MIPS_LIFTOFF_ASSEMBLER_MIPS_H_

#include "src/wasm/baseline/liftoff-assembler.h"

#define BAILOUT(reason) bailout("mips " reason)

namespace v8 {
namespace internal {
namespace wasm {

namespace liftoff {

#if defined(V8_TARGET_BIG_ENDIAN)
constexpr int32_t kLowWordOffset = 4;
constexpr int32_t kHighWordOffset = 0;
#else
constexpr int32_t kLowWordOffset = 0;
constexpr int32_t kHighWordOffset = 4;
#endif

// fp-4 holds the stack marker, fp-8 the instance parameter; the first stack
// slot starts below that.
constexpr int32_t kConstantStackSpace = 8;
constexpr int32_t kFirstStackSlotOffset =
    kConstantStackSpace + LiftoffAssembler::kStackSlotSize;

inline MemOperand GetStackSlot(uint32_t index) {
  int32_t offset = index * LiftoffAssembler::kStackSlotSize;
  return MemOperand(fp, -kFirstStackSlotOffset - offset);
}

inline MemOperand GetHalfStackSlot(uint32_t half_index) {
  int32_t offset = half_index * (LiftoffAssembler::kStackSlotSize / 2);
  return MemOperand(fp, -kFirstStackSlotOffset - offset);
}

inline MemOperand GetInstanceOperand() { return MemOperand(fp, -8); }
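// Helpers for moving values between registers and memory. An i64 value lives
// in a GP register pair and occupies two stack words; the word order follows
// kLowWordOffset/kHighWordOffset above, which depend on target endianness.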
inline void Load(LiftoffAssembler* assm, LiftoffRegister dst, Register base,
                 int32_t offset, ValueType type) {
  MemOperand src(base, offset);
  switch (type) {
    case kWasmI32:
      assm->lw(dst.gp(), src);
      break;
    case kWasmI64:
      assm->lw(dst.low_gp(),
               MemOperand(base, offset + liftoff::kLowWordOffset));
      assm->lw(dst.high_gp(),
               MemOperand(base, offset + liftoff::kHighWordOffset));
      break;
    case kWasmF32:
      assm->lwc1(dst.fp(), src);
      break;
    case kWasmF64:
      assm->Ldc1(dst.fp(), src);
      break;
    default:
      UNREACHABLE();
  }
}
inline void Store(LiftoffAssembler* assm, Register base, int32_t offset,
                  LiftoffRegister src, ValueType type) {
  MemOperand dst(base, offset);
  switch (type) {
    case kWasmI32:
      assm->Usw(src.gp(), dst);
      break;
    case kWasmI64:
      assm->Usw(src.low_gp(),
                MemOperand(base, offset + liftoff::kLowWordOffset));
      assm->Usw(src.high_gp(),
                MemOperand(base, offset + liftoff::kHighWordOffset));
      break;
    case kWasmF32:
      assm->Uswc1(src.fp(), dst, t8);
      break;
    case kWasmF64:
      assm->Usdc1(src.fp(), dst, t8);
      break;
    default:
      UNREACHABLE();
  }
}
inline void push(LiftoffAssembler* assm, LiftoffRegister reg, ValueType type) {
  switch (type) {
    case kWasmI32:
      assm->push(reg.gp());
      break;
    case kWasmI64:
      assm->Push(reg.high_gp(), reg.low_gp());
      break;
    case kWasmF32:
      assm->addiu(sp, sp, -sizeof(float));
      assm->swc1(reg.fp(), MemOperand(sp, 0));
      break;
    case kWasmF64:
      assm->addiu(sp, sp, -sizeof(double));
      assm->Sdc1(reg.fp(), MemOperand(sp, 0));
      break;
    default:
      UNREACHABLE();
  }
}
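// Wasm memory is little-endian, so on big-endian targets every value loaded
// from or stored to memory has to be byte-swapped. Float values are
// reinterpreted as integers for the swap and converted back afterwards.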
#if defined(V8_TARGET_BIG_ENDIAN)
inline void ChangeEndiannessLoad(LiftoffAssembler* assm, LiftoffRegister dst,
                                 LoadType type, LiftoffRegList pinned) {
  bool is_float = false;
  LiftoffRegister tmp = dst;
  switch (type.value()) {
    case LoadType::kI64Load8U:
    case LoadType::kI64Load8S:
    case LoadType::kI32Load8U:
    case LoadType::kI32Load8S:
      // No need to change endianness for byte-sized values.
      return;
    case LoadType::kF32Load:
      is_float = true;
      tmp = assm->GetUnusedRegister(kGpReg, pinned);
      assm->emit_type_conversion(kExprI32ReinterpretF32, tmp, dst);
      V8_FALLTHROUGH;
    case LoadType::kI32Load:
      assm->TurboAssembler::ByteSwapSigned(tmp.gp(), tmp.gp(), 4);
      break;
    case LoadType::kI32Load16S:
      assm->TurboAssembler::ByteSwapSigned(tmp.gp(), tmp.gp(), 2);
      break;
    case LoadType::kI32Load16U:
      assm->TurboAssembler::ByteSwapUnsigned(tmp.gp(), tmp.gp(), 2);
      break;
    case LoadType::kF64Load:
      is_float = true;
      tmp = assm->GetUnusedRegister(kGpRegPair, pinned);
      assm->emit_type_conversion(kExprI64ReinterpretF64, tmp, dst);
      V8_FALLTHROUGH;
    case LoadType::kI64Load:
      assm->TurboAssembler::Move(kScratchReg, tmp.low_gp());
      assm->TurboAssembler::ByteSwapSigned(tmp.low_gp(), tmp.high_gp(), 4);
      assm->TurboAssembler::ByteSwapSigned(tmp.high_gp(), kScratchReg, 4);
      break;
    case LoadType::kI64Load16U:
      assm->TurboAssembler::ByteSwapUnsigned(tmp.low_gp(), tmp.low_gp(), 2);
      assm->TurboAssembler::Move(tmp.high_gp(), zero_reg);
      break;
    case LoadType::kI64Load16S:
      assm->TurboAssembler::ByteSwapSigned(tmp.low_gp(), tmp.low_gp(), 2);
      assm->sra(tmp.high_gp(), tmp.low_gp(), 31);
      break;
    case LoadType::kI64Load32U:
      assm->TurboAssembler::ByteSwapSigned(tmp.low_gp(), tmp.low_gp(), 4);
      assm->TurboAssembler::Move(tmp.high_gp(), zero_reg);
      break;
    case LoadType::kI64Load32S:
      assm->TurboAssembler::ByteSwapSigned(tmp.low_gp(), tmp.low_gp(), 4);
      assm->sra(tmp.high_gp(), tmp.low_gp(), 31);
      break;
    default:
      UNREACHABLE();
  }

  if (is_float) {
    switch (type.value()) {
      case LoadType::kF32Load:
        assm->emit_type_conversion(kExprF32ReinterpretI32, dst, tmp);
        break;
      case LoadType::kF64Load:
        assm->emit_type_conversion(kExprF64ReinterpretI64, dst, tmp);
        break;
      default:
        UNREACHABLE();
    }
  }
}
inline void ChangeEndiannessStore(LiftoffAssembler* assm, LiftoffRegister src,
                                  StoreType type, LiftoffRegList pinned) {
  bool is_float = false;
  LiftoffRegister tmp = src;
  switch (type.value()) {
    case StoreType::kI64Store8:
    case StoreType::kI32Store8:
      // No need to change endianness for byte-sized values.
      return;
    case StoreType::kF32Store:
      is_float = true;
      tmp = assm->GetUnusedRegister(kGpReg, pinned);
      assm->emit_type_conversion(kExprI32ReinterpretF32, tmp, src);
      V8_FALLTHROUGH;
    case StoreType::kI32Store:
      assm->TurboAssembler::ByteSwapSigned(tmp.gp(), tmp.gp(), 4);
      break;
    case StoreType::kI32Store16:
      assm->TurboAssembler::ByteSwapSigned(tmp.gp(), tmp.gp(), 2);
      break;
    case StoreType::kF64Store:
      is_float = true;
      tmp = assm->GetUnusedRegister(kGpRegPair, pinned);
      assm->emit_type_conversion(kExprI64ReinterpretF64, tmp, src);
      V8_FALLTHROUGH;
    case StoreType::kI64Store:
      assm->TurboAssembler::Move(kScratchReg, tmp.low_gp());
      assm->TurboAssembler::ByteSwapSigned(tmp.low_gp(), tmp.high_gp(), 4);
      assm->TurboAssembler::ByteSwapSigned(tmp.high_gp(), kScratchReg, 4);
      break;
    case StoreType::kI64Store32:
      assm->TurboAssembler::ByteSwapSigned(tmp.low_gp(), tmp.low_gp(), 4);
      break;
    case StoreType::kI64Store16:
      assm->TurboAssembler::ByteSwapSigned(tmp.low_gp(), tmp.low_gp(), 2);
      break;
    default:
      UNREACHABLE();
  }

  if (is_float) {
    switch (type.value()) {
      case StoreType::kF32Store:
        assm->emit_type_conversion(kExprF32ReinterpretI32, src, tmp);
        break;
      case StoreType::kF64Store:
        assm->emit_type_conversion(kExprF64ReinterpretI64, src, tmp);
        break;
      default:
        UNREACHABLE();
    }
  }
}
#endif  // V8_TARGET_BIG_ENDIAN

}  // namespace liftoff

int LiftoffAssembler::PrepareStackFrame() {
  int offset = pc_offset();
  // When the constant that represents the size of the stack frame can't be
  // represented as a 16-bit immediate, three instructions are needed to add
  // it to sp, so reserve space for that case.
  addiu(sp, sp, 0);
  nop();
  nop();
  return offset;
}
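// Patches the placeholder frame setup emitted by PrepareStackFrame once the
// final number of stack slots is known.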
void LiftoffAssembler::PatchPrepareStackFrame(int offset,
                                              uint32_t stack_slots) {
  uint32_t bytes = liftoff::kConstantStackSpace + kStackSlotSize * stack_slots;
  DCHECK_LE(bytes, kMaxInt);
  // We can't run out of space here: just pass a buffer size big enough to keep
  // the assembler from trying to grow the buffer.
  constexpr int kAvailableSpace = 256;
  TurboAssembler patching_assembler(nullptr, AssemblerOptions{},
                                    buffer_ + offset, kAvailableSpace,
                                    CodeObjectRequired::kNo);
  // If {bytes} fits in a 16-bit immediate, a single addiu is generated and the
  // two reserved nops stay untouched; otherwise a lui/ori sequence loads the
  // value and a third instruction performs the addition.
  patching_assembler.Addu(sp, sp, Operand(-bytes));
}
void LiftoffAssembler::FinishCode() {}

void LiftoffAssembler::AbortCompilation() {}
void LiftoffAssembler::LoadConstant(LiftoffRegister reg, WasmValue value,
                                    RelocInfo::Mode rmode) {
  switch (value.type()) {
    case kWasmI32:
      TurboAssembler::li(reg.gp(), Operand(value.to_i32(), rmode));
      break;
    case kWasmI64: {
      DCHECK(RelocInfo::IsNone(rmode));
      int32_t low_word = value.to_i64();
      int32_t high_word = value.to_i64() >> 32;
      TurboAssembler::li(reg.low_gp(), Operand(low_word));
      TurboAssembler::li(reg.high_gp(), Operand(high_word));
      break;
    }
    case kWasmF32:
      TurboAssembler::Move(reg.fp(), value.to_f32_boxed().get_bits());
      break;
    case kWasmF64:
      TurboAssembler::Move(reg.fp(), value.to_f64_boxed().get_bits());
      break;
    default:
      UNREACHABLE();
  }
}
void LiftoffAssembler::LoadFromInstance(Register dst, uint32_t offset,
                                        int size) {
  DCHECK_LE(offset, kMaxInt);
  lw(dst, liftoff::GetInstanceOperand());
  DCHECK_EQ(4, size);
  lw(dst, MemOperand(dst, offset));
}

void LiftoffAssembler::SpillInstance(Register instance) {
  sw(instance, liftoff::GetInstanceOperand());
}

void LiftoffAssembler::FillInstanceInto(Register dst) {
  lw(dst, liftoff::GetInstanceOperand());
}
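// Memory accesses use the unaligned-access macros (Ulw, Ulh, Ulwc1, ...) since
// wasm loads and stores are not guaranteed to be naturally aligned; i64 values
// are transferred as two 32-bit words through a register pair.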
void LiftoffAssembler::Load(LiftoffRegister dst, Register src_addr,
                            Register offset_reg, uint32_t offset_imm,
                            LoadType type, LiftoffRegList pinned,
                            uint32_t* protected_load_pc, bool is_load_mem) {
  Register src = no_reg;
  if (offset_reg != no_reg) {
    src = GetUnusedRegister(kGpReg, pinned).gp();
    emit_ptrsize_add(src, src_addr, offset_reg);
  }
  MemOperand src_op = (offset_reg != no_reg) ? MemOperand(src, offset_imm)
                                             : MemOperand(src_addr, offset_imm);

  if (protected_load_pc) *protected_load_pc = pc_offset();
  switch (type.value()) {
    case LoadType::kI32Load8U:
      lbu(dst.gp(), src_op);
      break;
    case LoadType::kI64Load8U:
      lbu(dst.low_gp(), src_op);
      xor_(dst.high_gp(), dst.high_gp(), dst.high_gp());
      break;
    case LoadType::kI32Load8S:
      lb(dst.gp(), src_op);
      break;
    case LoadType::kI64Load8S:
      lb(dst.low_gp(), src_op);
      TurboAssembler::Move(dst.high_gp(), dst.low_gp());
      sra(dst.high_gp(), dst.high_gp(), 31);
      break;
    case LoadType::kI32Load16U:
      TurboAssembler::Ulhu(dst.gp(), src_op);
      break;
    case LoadType::kI64Load16U:
      TurboAssembler::Ulhu(dst.low_gp(), src_op);
      xor_(dst.high_gp(), dst.high_gp(), dst.high_gp());
      break;
    case LoadType::kI32Load16S:
      TurboAssembler::Ulh(dst.gp(), src_op);
      break;
    case LoadType::kI64Load16S:
      TurboAssembler::Ulh(dst.low_gp(), src_op);
      TurboAssembler::Move(dst.high_gp(), dst.low_gp());
      sra(dst.high_gp(), dst.high_gp(), 31);
      break;
    case LoadType::kI32Load:
      TurboAssembler::Ulw(dst.gp(), src_op);
      break;
    case LoadType::kI64Load32U:
      TurboAssembler::Ulw(dst.low_gp(), src_op);
      xor_(dst.high_gp(), dst.high_gp(), dst.high_gp());
      break;
    case LoadType::kI64Load32S:
      TurboAssembler::Ulw(dst.low_gp(), src_op);
      TurboAssembler::Move(dst.high_gp(), dst.low_gp());
      sra(dst.high_gp(), dst.high_gp(), 31);
      break;
    case LoadType::kI64Load: {
      MemOperand src_op =
          (offset_reg != no_reg)
              ? MemOperand(src, offset_imm + liftoff::kLowWordOffset)
              : MemOperand(src_addr, offset_imm + liftoff::kLowWordOffset);
      MemOperand src_op_upper =
          (offset_reg != no_reg)
              ? MemOperand(src, offset_imm + liftoff::kHighWordOffset)
              : MemOperand(src_addr, offset_imm + liftoff::kHighWordOffset);
      TurboAssembler::Ulw(dst.low_gp(), src_op);
      TurboAssembler::Ulw(dst.high_gp(), src_op_upper);
      break;
    }
    case LoadType::kF32Load:
      TurboAssembler::Ulwc1(dst.fp(), src_op, t8);
      break;
    case LoadType::kF64Load:
      TurboAssembler::Uldc1(dst.fp(), src_op, t8);
      break;
    default:
      UNREACHABLE();
  }

#if defined(V8_TARGET_BIG_ENDIAN)
  if (is_load_mem) {
    pinned.set(src_op.rm());
    liftoff::ChangeEndiannessLoad(this, dst, type, pinned);
  }
#endif
}
void LiftoffAssembler::Store(Register dst_addr, Register offset_reg,
                             uint32_t offset_imm, LiftoffRegister src,
                             StoreType type, LiftoffRegList pinned,
                             uint32_t* protected_store_pc, bool is_store_mem) {
  Register dst = no_reg;
  MemOperand dst_op = MemOperand(dst_addr, offset_imm);
  if (offset_reg != no_reg) {
    if (is_store_mem) {
      pinned.set(src);
    }
    dst = GetUnusedRegister(kGpReg, pinned).gp();
    emit_ptrsize_add(dst, dst_addr, offset_reg);
    dst_op = MemOperand(dst, offset_imm);
  }

#if defined(V8_TARGET_BIG_ENDIAN)
  if (is_store_mem) {
    pinned = pinned | LiftoffRegList::ForRegs(dst_op.rm(), src);
    LiftoffRegister tmp = GetUnusedRegister(src.reg_class(), pinned);
    // Save the original value, then byte-swap the copy.
    Move(tmp, src, type.value_type());
    src = tmp;
    pinned.set(tmp);
    liftoff::ChangeEndiannessStore(this, src, type, pinned);
  }
#endif

  if (protected_store_pc) *protected_store_pc = pc_offset();
  switch (type.value()) {
    case StoreType::kI64Store8:
      src = src.low();
      V8_FALLTHROUGH;
    case StoreType::kI32Store8:
      sb(src.gp(), dst_op);
      break;
    case StoreType::kI64Store16:
      src = src.low();
      V8_FALLTHROUGH;
    case StoreType::kI32Store16:
      TurboAssembler::Ush(src.gp(), dst_op, t8);
      break;
    case StoreType::kI64Store32:
      src = src.low();
      V8_FALLTHROUGH;
    case StoreType::kI32Store:
      TurboAssembler::Usw(src.gp(), dst_op);
      break;
    case StoreType::kI64Store: {
      MemOperand dst_op_lower(dst_op.rm(),
                              offset_imm + liftoff::kLowWordOffset);
      MemOperand dst_op_upper(dst_op.rm(),
                              offset_imm + liftoff::kHighWordOffset);
      TurboAssembler::Usw(src.low_gp(), dst_op_lower);
      TurboAssembler::Usw(src.high_gp(), dst_op_upper);
      break;
    }
    case StoreType::kF32Store:
      TurboAssembler::Uswc1(src.fp(), dst_op, t8);
      break;
    case StoreType::kF64Store:
      TurboAssembler::Usdc1(src.fp(), dst_op, t8);
      break;
    default:
      UNREACHABLE();
  }
}
void LiftoffAssembler::LoadCallerFrameSlot(LiftoffRegister dst,
                                           uint32_t caller_slot_idx,
                                           ValueType type) {
  int32_t offset = kPointerSize * (caller_slot_idx + 1);
  liftoff::Load(this, dst, fp, offset, type);
}

void LiftoffAssembler::MoveStackValue(uint32_t dst_index, uint32_t src_index,
                                      ValueType type) {
  DCHECK_NE(dst_index, src_index);
  LiftoffRegister reg = GetUnusedRegister(reg_class_for(type));
  Fill(reg, src_index, type);
  Spill(dst_index, reg, type);
}
void LiftoffAssembler::Move(Register dst, Register src, ValueType type) {
  DCHECK_NE(dst, src);
  TurboAssembler::mov(dst, src);
}

void LiftoffAssembler::Move(DoubleRegister dst, DoubleRegister src,
                            ValueType type) {
  DCHECK_NE(dst, src);
  TurboAssembler::Move(dst, src);
}
void LiftoffAssembler::Spill(uint32_t index, LiftoffRegister reg,
                             ValueType type) {
  RecordUsedSpillSlot(index);
  MemOperand dst = liftoff::GetStackSlot(index);
  switch (type) {
    case kWasmI32:
      sw(reg.gp(), dst);
      break;
    case kWasmI64:
      sw(reg.low_gp(), dst);
      sw(reg.high_gp(), liftoff::GetHalfStackSlot(2 * index + 1));
      break;
    case kWasmF32:
      swc1(reg.fp(), dst);
      break;
    case kWasmF64:
      TurboAssembler::Sdc1(reg.fp(), dst);
      break;
    default:
      UNREACHABLE();
  }
}
void LiftoffAssembler::Spill(uint32_t index, WasmValue value) {
  RecordUsedSpillSlot(index);
  MemOperand dst = liftoff::GetStackSlot(index);
  switch (value.type()) {
    case kWasmI32: {
      LiftoffRegister tmp = GetUnusedRegister(kGpReg);
      TurboAssembler::li(tmp.gp(), Operand(value.to_i32()));
      sw(tmp.gp(), dst);
      break;
    }
    case kWasmI64: {
      LiftoffRegister tmp = GetUnusedRegister(kGpRegPair);

      int32_t low_word = value.to_i64();
      int32_t high_word = value.to_i64() >> 32;
      TurboAssembler::li(tmp.low_gp(), Operand(low_word));
      TurboAssembler::li(tmp.high_gp(), Operand(high_word));

      sw(tmp.low_gp(), dst);
      sw(tmp.high_gp(), liftoff::GetHalfStackSlot(2 * index + 1));
      break;
    }
    default:
      // kWasmF32 and kWasmF64 constants are not spilled this way.
      UNREACHABLE();
  }
}
void LiftoffAssembler::Fill(LiftoffRegister reg, uint32_t index,
                            ValueType type) {
  MemOperand src = liftoff::GetStackSlot(index);
  switch (type) {
    case kWasmI32:
      lw(reg.gp(), src);
      break;
    case kWasmI64:
      lw(reg.low_gp(), src);
      lw(reg.high_gp(), liftoff::GetHalfStackSlot(2 * index + 1));
      break;
    case kWasmF32:
      lwc1(reg.fp(), src);
      break;
    case kWasmF64:
      TurboAssembler::Ldc1(reg.fp(), src);
      break;
    default:
      UNREACHABLE();
  }
}
void LiftoffAssembler::FillI64Half(Register reg, uint32_t half_index) {
  lw(reg, liftoff::GetHalfStackSlot(half_index));
}
void LiftoffAssembler::emit_i32_mul(Register dst, Register lhs, Register rhs) {
  TurboAssembler::Mul(dst, lhs, rhs);
}
void LiftoffAssembler::emit_i32_divs(Register dst, Register lhs, Register rhs,
                                     Label* trap_div_by_zero,
                                     Label* trap_div_unrepresentable) {
  TurboAssembler::Branch(trap_div_by_zero, eq, rhs, Operand(zero_reg));

  // Check if lhs == kMinInt and rhs == -1, since this case is unrepresentable.
  TurboAssembler::li(kScratchReg, 1);
  TurboAssembler::li(kScratchReg2, 1);
  TurboAssembler::LoadZeroOnCondition(kScratchReg, lhs, Operand(kMinInt), eq);
  TurboAssembler::LoadZeroOnCondition(kScratchReg2, rhs, Operand(-1), eq);
  addu(kScratchReg, kScratchReg, kScratchReg2);
  TurboAssembler::Branch(trap_div_unrepresentable, eq, kScratchReg,
                         Operand(zero_reg));

  TurboAssembler::Div(dst, lhs, rhs);
}
void LiftoffAssembler::emit_i32_divu(Register dst, Register lhs, Register rhs,
                                     Label* trap_div_by_zero) {
  TurboAssembler::Branch(trap_div_by_zero, eq, rhs, Operand(zero_reg));
  TurboAssembler::Divu(dst, lhs, rhs);
}

void LiftoffAssembler::emit_i32_rems(Register dst, Register lhs, Register rhs,
                                     Label* trap_div_by_zero) {
  TurboAssembler::Branch(trap_div_by_zero, eq, rhs, Operand(zero_reg));
  TurboAssembler::Mod(dst, lhs, rhs);
}

void LiftoffAssembler::emit_i32_remu(Register dst, Register lhs, Register rhs,
                                     Label* trap_div_by_zero) {
  TurboAssembler::Branch(trap_div_by_zero, eq, rhs, Operand(zero_reg));
  TurboAssembler::Modu(dst, lhs, rhs);
}
#define I32_BINOP(name, instruction)                                 \
  void LiftoffAssembler::emit_i32_##name(Register dst, Register lhs, \
                                         Register rhs) {             \
    instruction(dst, lhs, rhs);                                      \
  }

// clang-format off
I32_BINOP(add, addu)
I32_BINOP(sub, subu)
I32_BINOP(and, and_)
I32_BINOP(or, or_)
I32_BINOP(xor, xor_)
// clang-format on

#undef I32_BINOP

bool LiftoffAssembler::emit_i32_clz(Register dst, Register src) {
  TurboAssembler::Clz(dst, src);
  return true;
}

bool LiftoffAssembler::emit_i32_ctz(Register dst, Register src) {
  TurboAssembler::Ctz(dst, src);
  return true;
}

bool LiftoffAssembler::emit_i32_popcnt(Register dst, Register src) {
  TurboAssembler::Popcnt(dst, src);
  return true;
}

#define I32_SHIFTOP(name, instruction)                                      \
  void LiftoffAssembler::emit_i32_##name(                                   \
      Register dst, Register src, Register amount, LiftoffRegList pinned) { \
    instruction(dst, src, amount);                                          \
  }
#define I32_SHIFTOP_I(name, instruction)                             \
  I32_SHIFTOP(name, instruction##v)                                  \
  void LiftoffAssembler::emit_i32_##name(Register dst, Register src, \
                                         int amount) {               \
    DCHECK(is_uint5(amount));                                        \
    instruction(dst, src, amount);                                   \
  }

I32_SHIFTOP(shl, sllv)
I32_SHIFTOP(sar, srav)
I32_SHIFTOP_I(shr, srl)

#undef I32_SHIFTOP
#undef I32_SHIFTOP_I
void LiftoffAssembler::emit_i64_mul(LiftoffRegister dst, LiftoffRegister lhs,
                                    LiftoffRegister rhs) {
  TurboAssembler::MulPair(dst.low_gp(), dst.high_gp(), lhs.low_gp(),
                          lhs.high_gp(), rhs.low_gp(), rhs.high_gp(),
                          kScratchReg, kScratchReg2);
}
bool LiftoffAssembler::emit_i64_divs(LiftoffRegister dst, LiftoffRegister lhs,
                                     LiftoffRegister rhs,
                                     Label* trap_div_by_zero,
                                     Label* trap_div_unrepresentable) {
  return false;
}

bool LiftoffAssembler::emit_i64_divu(LiftoffRegister dst, LiftoffRegister lhs,
                                     LiftoffRegister rhs,
                                     Label* trap_div_by_zero) {
  return false;
}

bool LiftoffAssembler::emit_i64_rems(LiftoffRegister dst, LiftoffRegister lhs,
                                     LiftoffRegister rhs,
                                     Label* trap_div_by_zero) {
  return false;
}

bool LiftoffAssembler::emit_i64_remu(LiftoffRegister dst, LiftoffRegister lhs,
                                     LiftoffRegister rhs,
                                     Label* trap_div_by_zero) {
  return false;
}
void LiftoffAssembler::emit_i64_add(LiftoffRegister dst, LiftoffRegister lhs,
                                    LiftoffRegister rhs) {
  TurboAssembler::AddPair(dst.low_gp(), dst.high_gp(), lhs.low_gp(),
                          lhs.high_gp(), rhs.low_gp(), rhs.high_gp(),
                          kScratchReg, kScratchReg2);
}

void LiftoffAssembler::emit_i64_sub(LiftoffRegister dst, LiftoffRegister lhs,
                                    LiftoffRegister rhs) {
  TurboAssembler::SubPair(dst.low_gp(), dst.high_gp(), lhs.low_gp(),
                          lhs.high_gp(), rhs.low_gp(), rhs.high_gp(),
                          kScratchReg, kScratchReg2);
}
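// 64-bit shifts operate on a register pair. The helper below shifts into a
// temporary pair when the destination overlaps the source or the shift amount,
// and skips the shift entirely when the amount is zero.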
namespace liftoff {

inline bool IsRegInRegPair(LiftoffRegister pair, Register reg) {
  DCHECK(pair.is_pair());
  return pair.low_gp() == reg || pair.high_gp() == reg;
}

inline void Emit64BitShiftOperation(
    LiftoffAssembler* assm, LiftoffRegister dst, LiftoffRegister src,
    Register amount,
    void (TurboAssembler::*emit_shift)(Register, Register, Register, Register,
                                       Register, Register, Register),
    LiftoffRegList pinned) {
  Label move, done;
  pinned.set(dst);
  pinned.set(src);
  pinned.set(amount);

  // If some destination registers are in use, get another, unused pair. That
  // way we prevent overwriting input registers while shifting.
  LiftoffRegister tmp = assm->GetUnusedRegister(kGpRegPair, pinned);

  // If the shift amount is 0, don't do the shifting.
  assm->TurboAssembler::Branch(&move, eq, amount, Operand(zero_reg));

  if (liftoff::IsRegInRegPair(dst, amount) || dst.overlaps(src)) {
    // Do the actual shift into the temporary pair.
    (assm->*emit_shift)(tmp.low_gp(), tmp.high_gp(), src.low_gp(),
                        src.high_gp(), amount, kScratchReg, kScratchReg2);

    // Place the result in the destination register pair.
    assm->TurboAssembler::Move(dst.high_gp(), tmp.high_gp());
    assm->TurboAssembler::Move(dst.low_gp(), tmp.low_gp());
  } else {
    (assm->*emit_shift)(dst.low_gp(), dst.high_gp(), src.low_gp(),
                        src.high_gp(), amount, kScratchReg, kScratchReg2);
  }
  assm->TurboAssembler::Branch(&done);

  // If the shift amount is 0, just move src to dst.
  assm->bind(&move);
  assm->TurboAssembler::Move(dst.high_gp(), src.high_gp());
  assm->TurboAssembler::Move(dst.low_gp(), src.low_gp());

  assm->bind(&done);
}
}  // namespace liftoff
void LiftoffAssembler::emit_i64_shl(LiftoffRegister dst, LiftoffRegister src,
                                    Register amount, LiftoffRegList pinned) {
  liftoff::Emit64BitShiftOperation(this, dst, src, amount,
                                   &TurboAssembler::ShlPair, pinned);
}

void LiftoffAssembler::emit_i64_sar(LiftoffRegister dst, LiftoffRegister src,
                                    Register amount, LiftoffRegList pinned) {
  liftoff::Emit64BitShiftOperation(this, dst, src, amount,
                                   &TurboAssembler::SarPair, pinned);
}

void LiftoffAssembler::emit_i64_shr(LiftoffRegister dst, LiftoffRegister src,
                                    Register amount, LiftoffRegList pinned) {
  liftoff::Emit64BitShiftOperation(this, dst, src, amount,
                                   &TurboAssembler::ShrPair, pinned);
}
void LiftoffAssembler::emit_i64_shr(LiftoffRegister dst, LiftoffRegister src,
                                    int amount) {
  DCHECK(is_uint6(amount));
  ShrPair(dst.high_gp(), dst.low_gp(), src.high_gp(), src.low_gp(), amount,
          kScratchReg);
}

void LiftoffAssembler::emit_i32_to_intptr(Register dst, Register src) {
  // This is a no-op on mips32.
}
void LiftoffAssembler::emit_f32_neg(DoubleRegister dst, DoubleRegister src) {
  TurboAssembler::Neg_s(dst, src);
}

void LiftoffAssembler::emit_f64_neg(DoubleRegister dst, DoubleRegister src) {
  TurboAssembler::Neg_d(dst, src);
}
void LiftoffAssembler::emit_f32_min(DoubleRegister dst, DoubleRegister lhs,
                                    DoubleRegister rhs) {
  Label ool, done;
  TurboAssembler::Float32Min(dst, lhs, rhs, &ool);
  Branch(&done);

  bind(&ool);
  TurboAssembler::Float32MinOutOfLine(dst, lhs, rhs);
  bind(&done);
}

void LiftoffAssembler::emit_f32_max(DoubleRegister dst, DoubleRegister lhs,
                                    DoubleRegister rhs) {
  Label ool, done;
  TurboAssembler::Float32Max(dst, lhs, rhs, &ool);
  Branch(&done);

  bind(&ool);
  TurboAssembler::Float32MaxOutOfLine(dst, lhs, rhs);
  bind(&done);
}

void LiftoffAssembler::emit_f32_copysign(DoubleRegister dst, DoubleRegister lhs,
                                         DoubleRegister rhs) {
  BAILOUT("f32_copysign");
}

void LiftoffAssembler::emit_f64_min(DoubleRegister dst, DoubleRegister lhs,
                                    DoubleRegister rhs) {
  Label ool, done;
  TurboAssembler::Float64Min(dst, lhs, rhs, &ool);
  Branch(&done);

  bind(&ool);
  TurboAssembler::Float64MinOutOfLine(dst, lhs, rhs);
  bind(&done);
}

void LiftoffAssembler::emit_f64_max(DoubleRegister dst, DoubleRegister lhs,
                                    DoubleRegister rhs) {
  Label ool, done;
  TurboAssembler::Float64Max(dst, lhs, rhs, &ool);
  Branch(&done);

  bind(&ool);
  TurboAssembler::Float64MaxOutOfLine(dst, lhs, rhs);
  bind(&done);
}

void LiftoffAssembler::emit_f64_copysign(DoubleRegister dst, DoubleRegister lhs,
                                         DoubleRegister rhs) {
  BAILOUT("f64_copysign");
}
#define FP_BINOP(name, instruction)                                          \
  void LiftoffAssembler::emit_##name(DoubleRegister dst, DoubleRegister lhs, \
                                     DoubleRegister rhs) {                   \
    instruction(dst, lhs, rhs);                                              \
  }
#define FP_UNOP(name, instruction)                                             \
  void LiftoffAssembler::emit_##name(DoubleRegister dst, DoubleRegister src) { \
    instruction(dst, src);                                                     \
  }
#define FP_UNOP_RETURN_TRUE(name, instruction)                                 \
  bool LiftoffAssembler::emit_##name(DoubleRegister dst, DoubleRegister src) { \
    instruction(dst, src);                                                     \
    return true;                                                               \
  }

FP_BINOP(f32_add, add_s)
FP_BINOP(f32_sub, sub_s)
FP_BINOP(f32_mul, mul_s)
FP_BINOP(f32_div, div_s)
FP_UNOP(f32_abs, abs_s)
FP_UNOP_RETURN_TRUE(f32_ceil, Ceil_s_s)
FP_UNOP_RETURN_TRUE(f32_floor, Floor_s_s)
FP_UNOP_RETURN_TRUE(f32_trunc, Trunc_s_s)
FP_UNOP_RETURN_TRUE(f32_nearest_int, Round_s_s)
FP_UNOP(f32_sqrt, sqrt_s)
FP_BINOP(f64_add, add_d)
FP_BINOP(f64_sub, sub_d)
FP_BINOP(f64_mul, mul_d)
FP_BINOP(f64_div, div_d)
FP_UNOP(f64_abs, abs_d)
FP_UNOP(f64_sqrt, sqrt_d)

#undef FP_BINOP
#undef FP_UNOP
#undef FP_UNOP_RETURN_TRUE
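// Double-precision rounding is only a single instruction on MIPS32R2/R6 with
// FP64; otherwise these helpers return false so a fallback is used instead.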
bool LiftoffAssembler::emit_f64_ceil(DoubleRegister dst, DoubleRegister src) {
  if ((IsMipsArchVariant(kMips32r2) || IsMipsArchVariant(kMips32r6)) &&
      IsFp64Mode()) {
    Ceil_d_d(dst, src);
    return true;
  }
  return false;
}

bool LiftoffAssembler::emit_f64_floor(DoubleRegister dst, DoubleRegister src) {
  if ((IsMipsArchVariant(kMips32r2) || IsMipsArchVariant(kMips32r6)) &&
      IsFp64Mode()) {
    Floor_d_d(dst, src);
    return true;
  }
  return false;
}

bool LiftoffAssembler::emit_f64_trunc(DoubleRegister dst, DoubleRegister src) {
  if ((IsMipsArchVariant(kMips32r2) || IsMipsArchVariant(kMips32r6)) &&
      IsFp64Mode()) {
    Trunc_d_d(dst, src);
    return true;
  }
  return false;
}

bool LiftoffAssembler::emit_f64_nearest_int(DoubleRegister dst,
                                            DoubleRegister src) {
  if ((IsMipsArchVariant(kMips32r2) || IsMipsArchVariant(kMips32r6)) &&
      IsFp64Mode()) {
    Round_d_d(dst, src);
    return true;
  }
  return false;
}
bool LiftoffAssembler::emit_type_conversion(WasmOpcode opcode,
                                            LiftoffRegister dst,
                                            LiftoffRegister src, Label* trap) {
  switch (opcode) {
    case kExprI32ConvertI64:
      TurboAssembler::Move(dst.gp(), src.low_gp());
      return true;
    case kExprI32SConvertF32: {
      LiftoffRegister rounded =
          GetUnusedRegister(kFpReg, LiftoffRegList::ForRegs(src));
      LiftoffRegister converted_back =
          GetUnusedRegister(kFpReg, LiftoffRegList::ForRegs(src, rounded));

      // Real conversion.
      TurboAssembler::Trunc_s_s(rounded.fp(), src.fp());
      trunc_w_s(kScratchDoubleReg, rounded.fp());
      mfc1(dst.gp(), kScratchDoubleReg);
      // Avoid INT32_MAX as an overflow indicator and use INT32_MIN instead,
      // because INT32_MIN allows easier out-of-bounds detection.
      TurboAssembler::Addu(kScratchReg, dst.gp(), 1);
      TurboAssembler::Slt(kScratchReg2, kScratchReg, dst.gp());
      TurboAssembler::Movn(dst.gp(), kScratchReg, kScratchReg2);

      // Check whether the conversion was lossless; trap otherwise.
      mtc1(dst.gp(), kScratchDoubleReg);
      cvt_s_w(converted_back.fp(), kScratchDoubleReg);
      TurboAssembler::CompareF32(EQ, rounded.fp(), converted_back.fp());
      TurboAssembler::BranchFalseF(trap);
      return true;
    }
    case kExprI32UConvertF32: {
      LiftoffRegister rounded =
          GetUnusedRegister(kFpReg, LiftoffRegList::ForRegs(src));
      LiftoffRegister converted_back =
          GetUnusedRegister(kFpReg, LiftoffRegList::ForRegs(src, rounded));

      // Real conversion.
      TurboAssembler::Trunc_s_s(rounded.fp(), src.fp());
      TurboAssembler::Trunc_uw_s(dst.gp(), rounded.fp(), kScratchDoubleReg);
      // Avoid UINT32_MAX as an overflow indicator and use 0 instead, because
      // 0 allows easier out-of-bounds detection.
      TurboAssembler::Addu(kScratchReg, dst.gp(), 1);
      TurboAssembler::Movz(dst.gp(), zero_reg, kScratchReg);

      // Check whether the conversion was lossless; trap otherwise.
      TurboAssembler::Cvt_d_uw(converted_back.fp(), dst.gp(),
                               kScratchDoubleReg);
      cvt_s_d(converted_back.fp(), converted_back.fp());
      TurboAssembler::CompareF32(EQ, rounded.fp(), converted_back.fp());
      TurboAssembler::BranchFalseF(trap);
      return true;
    }
    case kExprI32SConvertF64: {
      if ((IsMipsArchVariant(kMips32r2) || IsMipsArchVariant(kMips32r6)) &&
          IsFp64Mode()) {
        LiftoffRegister rounded =
            GetUnusedRegister(kFpReg, LiftoffRegList::ForRegs(src));
        LiftoffRegister converted_back =
            GetUnusedRegister(kFpReg, LiftoffRegList::ForRegs(src, rounded));

        // Real conversion.
        TurboAssembler::Trunc_d_d(rounded.fp(), src.fp());
        TurboAssembler::Trunc_w_d(kScratchDoubleReg, rounded.fp());
        mfc1(dst.gp(), kScratchDoubleReg);

        // Check whether the conversion was lossless; trap otherwise.
        cvt_d_w(converted_back.fp(), kScratchDoubleReg);
        TurboAssembler::CompareF64(EQ, rounded.fp(), converted_back.fp());
        TurboAssembler::BranchFalseF(trap);
        return true;
      } else {
        BAILOUT("emit_type_conversion kExprI32SConvertF64");
        return true;
      }
    }
    case kExprI32UConvertF64: {
      if ((IsMipsArchVariant(kMips32r2) || IsMipsArchVariant(kMips32r6)) &&
          IsFp64Mode()) {
        LiftoffRegister rounded =
            GetUnusedRegister(kFpReg, LiftoffRegList::ForRegs(src));
        LiftoffRegister converted_back =
            GetUnusedRegister(kFpReg, LiftoffRegList::ForRegs(src, rounded));

        // Real conversion.
        TurboAssembler::Trunc_d_d(rounded.fp(), src.fp());
        TurboAssembler::Trunc_uw_d(dst.gp(), rounded.fp(), kScratchDoubleReg);

        // Check whether the conversion was lossless; trap otherwise.
        TurboAssembler::Cvt_d_uw(converted_back.fp(), dst.gp(),
                                 kScratchDoubleReg);
        TurboAssembler::CompareF64(EQ, rounded.fp(), converted_back.fp());
        TurboAssembler::BranchFalseF(trap);
        return true;
      } else {
        BAILOUT("emit_type_conversion kExprI32UConvertF64");
        return true;
      }
    }
    case kExprI32ReinterpretF32:
      mfc1(dst.gp(), src.fp());
      return true;
    case kExprI64SConvertI32:
      TurboAssembler::Move(dst.low_gp(), src.gp());
      TurboAssembler::Move(dst.high_gp(), src.gp());
      sra(dst.high_gp(), dst.high_gp(), 31);
      return true;
    case kExprI64UConvertI32:
      TurboAssembler::Move(dst.low_gp(), src.gp());
      TurboAssembler::Move(dst.high_gp(), zero_reg);
      return true;
    case kExprI64ReinterpretF64:
      mfc1(dst.low_gp(), src.fp());
      TurboAssembler::Mfhc1(dst.high_gp(), src.fp());
      return true;
    case kExprF32SConvertI32: {
      LiftoffRegister scratch =
          GetUnusedRegister(kFpReg, LiftoffRegList::ForRegs(dst));
      mtc1(src.gp(), scratch.fp());
      cvt_s_w(dst.fp(), scratch.fp());
      return true;
    }
    case kExprF32UConvertI32: {
      LiftoffRegister scratch =
          GetUnusedRegister(kFpReg, LiftoffRegList::ForRegs(dst));
      TurboAssembler::Cvt_d_uw(dst.fp(), src.gp(), scratch.fp());
      cvt_s_d(dst.fp(), dst.fp());
      return true;
    }
    case kExprF32ConvertF64:
      cvt_s_d(dst.fp(), src.fp());
      return true;
    case kExprF32ReinterpretI32:
      TurboAssembler::FmoveLow(dst.fp(), src.gp());
      return true;
    case kExprF64SConvertI32: {
      LiftoffRegister scratch =
          GetUnusedRegister(kFpReg, LiftoffRegList::ForRegs(dst));
      mtc1(src.gp(), scratch.fp());
      cvt_d_w(dst.fp(), scratch.fp());
      return true;
    }
    case kExprF64UConvertI32: {
      LiftoffRegister scratch =
          GetUnusedRegister(kFpReg, LiftoffRegList::ForRegs(dst));
      TurboAssembler::Cvt_d_uw(dst.fp(), src.gp(), scratch.fp());
      return true;
    }
    case kExprF64ConvertF32:
      cvt_d_s(dst.fp(), src.fp());
      return true;
    case kExprF64ReinterpretI64:
      mtc1(src.low_gp(), dst.fp());
      TurboAssembler::Mthc1(src.high_gp(), dst.fp());
      return true;
    default:
      return false;
  }
}
void LiftoffAssembler::emit_i32_signextend_i8(Register dst, Register src) {
  BAILOUT("emit_i32_signextend_i8");
}

void LiftoffAssembler::emit_i32_signextend_i16(Register dst, Register src) {
  BAILOUT("emit_i32_signextend_i16");
}

void LiftoffAssembler::emit_i64_signextend_i8(LiftoffRegister dst,
                                              LiftoffRegister src) {
  BAILOUT("emit_i64_signextend_i8");
}

void LiftoffAssembler::emit_i64_signextend_i16(LiftoffRegister dst,
                                               LiftoffRegister src) {
  BAILOUT("emit_i64_signextend_i16");
}

void LiftoffAssembler::emit_i64_signextend_i32(LiftoffRegister dst,
                                               LiftoffRegister src) {
  BAILOUT("emit_i64_signextend_i32");
}
void LiftoffAssembler::emit_jump(Label* label) {
  TurboAssembler::Branch(label);
}

void LiftoffAssembler::emit_jump(Register target) {
  TurboAssembler::Jump(target);
}
void LiftoffAssembler::emit_cond_jump(Condition cond, Label* label,
                                      ValueType type, Register lhs,
                                      Register rhs) {
  if (rhs != no_reg) {
    TurboAssembler::Branch(label, cond, lhs, Operand(rhs));
  } else {
    TurboAssembler::Branch(label, cond, lhs, Operand(zero_reg));
  }
}
void LiftoffAssembler::emit_i32_eqz(Register dst, Register src) {
  sltiu(dst, src, 1);
}

void LiftoffAssembler::emit_i32_set_cond(Condition cond, Register dst,
                                         Register lhs, Register rhs) {
  Register tmp = dst;
  if (dst == lhs || dst == rhs) {
    tmp = GetUnusedRegister(kGpReg, LiftoffRegList::ForRegs(lhs, rhs)).gp();
  }
  // Write 1 as the result.
  TurboAssembler::li(tmp, 1);
  // If the negated condition holds, overwrite the result with 0.
  Condition neg_cond = NegateCondition(cond);
  TurboAssembler::LoadZeroOnCondition(tmp, lhs, Operand(rhs), neg_cond);
  // If tmp != dst, move the result into place.
  TurboAssembler::Move(dst, tmp);
}

void LiftoffAssembler::emit_i64_eqz(Register dst, LiftoffRegister src) {
  Register tmp =
      GetUnusedRegister(kGpReg, LiftoffRegList::ForRegs(src, dst)).gp();
  sltiu(tmp, src.low_gp(), 1);
  sltiu(dst, src.high_gp(), 1);
  and_(dst, dst, tmp);
}
namespace liftoff {

inline Condition cond_make_unsigned(Condition cond) {
  switch (cond) {
    case kSignedLessThan:
      return kUnsignedLessThan;
    case kSignedLessEqual:
      return kUnsignedLessEqual;
    case kSignedGreaterThan:
      return kUnsignedGreaterThan;
    case kSignedGreaterEqual:
      return kUnsignedGreaterEqual;
    default:
      return cond;
  }
}

}  // namespace liftoff
void LiftoffAssembler::emit_i64_set_cond(Condition cond, Register dst,
                                         LiftoffRegister lhs,
                                         LiftoffRegister rhs) {
  Label low, cont;

  // For signed i64 comparisons, the low words must be compared unsigned:
  // only the high word carries the sign.
  Condition unsigned_cond = liftoff::cond_make_unsigned(cond);

  Register tmp = dst;
  if (liftoff::IsRegInRegPair(lhs, dst) || liftoff::IsRegInRegPair(rhs, dst)) {
    tmp =
        GetUnusedRegister(kGpReg, LiftoffRegList::ForRegs(dst, lhs, rhs)).gp();
  }

  // Write 1 as the result.
  TurboAssembler::li(tmp, 1);

  // If the high words are equal, the low words decide the result.
  Branch(&low, eq, lhs.high_gp(), Operand(rhs.high_gp()));

  TurboAssembler::LoadZeroOnCondition(
      tmp, lhs.high_gp(), Operand(rhs.high_gp()), NegateCondition(cond));
  Branch(&cont);

  bind(&low);
  TurboAssembler::LoadZeroOnCondition(tmp, lhs.low_gp(), Operand(rhs.low_gp()),
                                      NegateCondition(unsigned_cond));

  bind(&cont);
  // If tmp != dst, move the result into place.
  TurboAssembler::Move(dst, tmp);
}
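// Maps a Liftoff condition to an FPU compare condition; {predicate} indicates
// whether the result is taken when the FPU condition bit is set (true) or
// cleared (false).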
namespace liftoff {

inline FPUCondition ConditionToConditionCmpFPU(bool& predicate,
                                               Condition condition) {
  switch (condition) {
    case kEqual:
      predicate = true;
      return EQ;
    case kUnequal:
      predicate = false;
      return EQ;
    case kUnsignedLessThan:
      predicate = true;
      return OLT;
    case kUnsignedGreaterEqual:
      predicate = false;
      return OLT;
    case kUnsignedLessEqual:
      predicate = true;
      return OLE;
    case kUnsignedGreaterThan:
      predicate = false;
      return OLE;
    default:
      predicate = true;
      break;
  }
  UNREACHABLE();
}

}  // namespace liftoff
void LiftoffAssembler::emit_f32_set_cond(Condition cond, Register dst,
                                         DoubleRegister lhs,
                                         DoubleRegister rhs) {
  Label not_nan, cont;
  TurboAssembler::CompareIsNanF32(lhs, rhs);
  TurboAssembler::BranchFalseF(&not_nan);
  // If one of the operands is NaN, the result is 1 for "unequal", 0 otherwise.
  if (cond == ne) {
    TurboAssembler::li(dst, 1);
  } else {
    TurboAssembler::Move(dst, zero_reg);
  }
  TurboAssembler::Branch(&cont);

  bind(&not_nan);

  TurboAssembler::li(dst, 1);
  bool predicate;
  FPUCondition fcond = liftoff::ConditionToConditionCmpFPU(predicate, cond);
  TurboAssembler::CompareF32(fcond, lhs, rhs);
  if (predicate) {
    TurboAssembler::LoadZeroIfNotFPUCondition(dst);
  } else {
    TurboAssembler::LoadZeroIfFPUCondition(dst);
  }

  bind(&cont);
}

void LiftoffAssembler::emit_f64_set_cond(Condition cond, Register dst,
                                         DoubleRegister lhs,
                                         DoubleRegister rhs) {
  Label not_nan, cont;
  TurboAssembler::CompareIsNanF64(lhs, rhs);
  TurboAssembler::BranchFalseF(&not_nan);
  // If one of the operands is NaN, the result is 1 for "unequal", 0 otherwise.
  if (cond == ne) {
    TurboAssembler::li(dst, 1);
  } else {
    TurboAssembler::Move(dst, zero_reg);
  }
  TurboAssembler::Branch(&cont);

  bind(&not_nan);

  TurboAssembler::li(dst, 1);
  bool predicate;
  FPUCondition fcond = liftoff::ConditionToConditionCmpFPU(predicate, cond);
  TurboAssembler::CompareF64(fcond, lhs, rhs);
  if (predicate) {
    TurboAssembler::LoadZeroIfNotFPUCondition(dst);
  } else {
    TurboAssembler::LoadZeroIfFPUCondition(dst);
  }

  bind(&cont);
}
void LiftoffAssembler::StackCheck(Label* ool_code, Register limit_address) {
  TurboAssembler::Ulw(limit_address, MemOperand(limit_address));
  TurboAssembler::Branch(ool_code, ule, sp, Operand(limit_address));
}
void LiftoffAssembler::CallTrapCallbackForTesting() {
  PrepareCallCFunction(0, GetUnusedRegister(kGpReg).gp());
  CallCFunction(ExternalReference::wasm_call_trap_callback_for_testing(), 0);
}

void LiftoffAssembler::AssertUnreachable(AbortReason reason) {
  if (emit_debug_code()) Abort(reason);
}
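// Registers are saved in two blocks: GP registers first, then FP registers
// below them. PopRegisters restores them in the reverse order.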
void LiftoffAssembler::PushRegisters(LiftoffRegList regs) {
  LiftoffRegList gp_regs = regs & kGpCacheRegList;
  unsigned num_gp_regs = gp_regs.GetNumRegsSet();
  if (num_gp_regs) {
    unsigned offset = num_gp_regs * kPointerSize;
    addiu(sp, sp, -offset);
    while (!gp_regs.is_empty()) {
      LiftoffRegister reg = gp_regs.GetFirstRegSet();
      offset -= kPointerSize;
      sw(reg.gp(), MemOperand(sp, offset));
      gp_regs.clear(reg);
    }
    DCHECK_EQ(offset, 0);
  }
  LiftoffRegList fp_regs = regs & kFpCacheRegList;
  unsigned num_fp_regs = fp_regs.GetNumRegsSet();
  if (num_fp_regs) {
    addiu(sp, sp, -(num_fp_regs * kStackSlotSize));
    unsigned offset = 0;
    while (!fp_regs.is_empty()) {
      LiftoffRegister reg = fp_regs.GetFirstRegSet();
      TurboAssembler::Sdc1(reg.fp(), MemOperand(sp, offset));
      fp_regs.clear(reg);
      offset += sizeof(double);
    }
    DCHECK_EQ(offset, num_fp_regs * sizeof(double));
  }
}
void LiftoffAssembler::PopRegisters(LiftoffRegList regs) {
  LiftoffRegList fp_regs = regs & kFpCacheRegList;
  unsigned fp_offset = 0;
  while (!fp_regs.is_empty()) {
    LiftoffRegister reg = fp_regs.GetFirstRegSet();
    TurboAssembler::Ldc1(reg.fp(), MemOperand(sp, fp_offset));
    fp_regs.clear(reg);
    fp_offset += sizeof(double);
  }
  if (fp_offset) addiu(sp, sp, fp_offset);
  LiftoffRegList gp_regs = regs & kGpCacheRegList;
  unsigned gp_offset = 0;
  while (!gp_regs.is_empty()) {
    LiftoffRegister reg = gp_regs.GetLastRegSet();
    lw(reg.gp(), MemOperand(sp, gp_offset));
    gp_regs.clear(reg);
    gp_offset += kPointerSize;
  }
  addiu(sp, sp, gp_offset);
}
void LiftoffAssembler::DropStackSlotsAndRet(uint32_t num_stack_slots) {
  DCHECK_LT(num_stack_slots, (1 << 16) / kPointerSize);
  TurboAssembler::DropAndRet(static_cast<int>(num_stack_slots));
}
void LiftoffAssembler::CallC(wasm::FunctionSig* sig,
                             const LiftoffRegister* args,
                             const LiftoffRegister* rets,
                             ValueType out_argument_type, int stack_bytes,
                             ExternalReference ext_ref) {
  addiu(sp, sp, -stack_bytes);

  int arg_bytes = 0;
  for (ValueType param_type : sig->parameters()) {
    liftoff::Store(this, sp, arg_bytes, *args++, param_type);
    arg_bytes += ValueTypes::MemSize(param_type);
  }
  DCHECK_LE(arg_bytes, stack_bytes);

  // Pass a pointer to the buffer with the arguments to the C function.
  // On mips, the first argument is passed in {a0}.
  constexpr Register kFirstArgReg = a0;
  mov(kFirstArgReg, sp);

  // Now call the C function.
  constexpr int kNumCCallArgs = 1;
  PrepareCallCFunction(kNumCCallArgs, kScratchReg);
  CallCFunction(ext_ref, kNumCCallArgs);

  // Move the return value to the right register.
  const LiftoffRegister* next_result_reg = rets;
  if (sig->return_count() > 0) {
    DCHECK_EQ(1, sig->return_count());
    constexpr Register kReturnReg = v0;
    if (kReturnReg != next_result_reg->gp()) {
      Move(*next_result_reg, LiftoffRegister(kReturnReg), sig->GetReturn(0));
    }
    ++next_result_reg;
  }

  // Load a potential output value from the buffer on the stack.
  if (out_argument_type != kWasmStmt) {
    liftoff::Load(this, *next_result_reg, sp, 0, out_argument_type);
  }

  addiu(sp, sp, stack_bytes);
}
void LiftoffAssembler::CallNativeWasmCode(Address addr) {
  Call(addr, RelocInfo::WASM_CALL);
}
void LiftoffAssembler::CallIndirect(wasm::FunctionSig* sig,
                                    compiler::CallDescriptor* call_descriptor,
                                    Register target) {
  if (target == no_reg) {
    pop(kScratchReg);
    Call(kScratchReg);
  } else {
    Call(target);
  }
}

void LiftoffAssembler::CallRuntimeStub(WasmCode::RuntimeStubId sid) {
  // A direct call to a wasm runtime stub defined in this module.
  // Just encode the stub index; this will be patched at relocation.
  Call(static_cast<Address>(sid), RelocInfo::WASM_STUB_CALL);
}
void LiftoffAssembler::AllocateStackSlot(Register addr, uint32_t size) {
  addiu(sp, sp, -size);
  TurboAssembler::Move(addr, sp);
}

void LiftoffAssembler::DeallocateStackSlot(uint32_t size) {
  addiu(sp, sp, size);
}
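// Materializes the stack parameters for a call: stack slots are copied word by
// word, register values are pushed via liftoff::push, and constants are
// materialized into kScratchReg first.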
void LiftoffStackSlots::Construct() {
  for (auto& slot : slots_) {
    const LiftoffAssembler::VarState& src = slot.src_;
    switch (src.loc()) {
      case LiftoffAssembler::VarState::kStack: {
        if (src.type() == kWasmF64) {
          DCHECK_EQ(kLowWord, slot.half_);
          asm_->lw(kScratchReg,
                   liftoff::GetHalfStackSlot(2 * slot.src_index_ - 1));
          asm_->push(kScratchReg);
        }
        asm_->lw(kScratchReg,
                 liftoff::GetHalfStackSlot(2 * slot.src_index_ +
                                           (slot.half_ == kLowWord ? 0 : 1)));
        asm_->push(kScratchReg);
        break;
      }
      case LiftoffAssembler::VarState::kRegister:
        if (src.type() == kWasmI64) {
          liftoff::push(
              asm_, slot.half_ == kLowWord ? src.reg().low() : src.reg().high(),
              kWasmI32);
        } else {
          liftoff::push(asm_, src.reg(), src.type());
        }
        break;
      case LiftoffAssembler::VarState::KIntConst: {
        // The high word is the sign extension of the low word.
        asm_->li(kScratchReg,
                 Operand(slot.half_ == kLowWord ? src.i32_const()
                                                : src.i32_const() >> 31));
        asm_->push(kScratchReg);
        break;
      }
    }
  }
}
}  // namespace wasm
}  // namespace internal
}  // namespace v8

#undef BAILOUT

#endif  // V8_WASM_BASELINE_MIPS_LIFTOFF_ASSEMBLER_MIPS_H_