#include "src/wasm/baseline/liftoff-compiler.h"

#include "src/assembler-inl.h"
#include "src/base/optional.h"
#include "src/compiler/linkage.h"
#include "src/compiler/wasm-compiler.h"
#include "src/counters.h"
#include "src/macro-assembler-inl.h"
#include "src/objects/smi.h"
#include "src/tracing/trace-event.h"
#include "src/wasm/baseline/liftoff-assembler.h"
#include "src/wasm/function-body-decoder-impl.h"
#include "src/wasm/function-compiler.h"
#include "src/wasm/memory-tracing.h"
#include "src/wasm/object-access.h"
#include "src/wasm/wasm-engine.h"
#include "src/wasm/wasm-linkage.h"
#include "src/wasm/wasm-objects.h"
#include "src/wasm/wasm-opcodes.h"

namespace v8 {
namespace internal {
namespace wasm {

constexpr auto kRegister = LiftoffAssembler::VarState::kRegister;
constexpr auto KIntConst = LiftoffAssembler::VarState::KIntConst;
constexpr auto kStack = LiftoffAssembler::VarState::kStack;

namespace {
#define __ asm_.

#define TRACE(...)                                            \
  do {                                                        \
    if (FLAG_trace_liftoff) PrintF("[liftoff] " __VA_ARGS__); \
  } while (false)

#define WASM_INSTANCE_OBJECT_OFFSET(name) \
  ObjectAccess::ToTagged(WasmInstanceObject::k##name##Offset)

template <int expected_size, int actual_size>
struct assert_field_size {
  static_assert(expected_size == actual_size,
                "field in WasmInstance does not have the expected size");
  static constexpr int size = actual_size;
};

#define WASM_INSTANCE_OBJECT_SIZE(name)     \
  (WasmInstanceObject::k##name##OffsetEnd - \
   WasmInstanceObject::k##name##Offset + 1)  // NOLINT(whitespace/indent)

#define LOAD_INSTANCE_FIELD(dst, name, load_size) \
  __ LoadFromInstance(                            \
      dst, WASM_INSTANCE_OBJECT_OFFSET(name),     \
      assert_field_size<WASM_INSTANCE_OBJECT_SIZE(name), load_size>::size);

#ifdef DEBUG
#define DEBUG_CODE_COMMENT(str) \
  do {                          \
    __ RecordComment(str);      \
  } while (false)
#else
#define DEBUG_CODE_COMMENT(str) ((void)0)
#endif

constexpr LoadType::LoadTypeValue kPointerLoadType =
    kPointerSize == 8 ? LoadType::kI64Load : LoadType::kI32Load;
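// For illustration, a minimal sketch of what
//   LOAD_INSTANCE_FIELD(dst, MemoryStart, kPointerSize)
// expands to (assuming a pointer-sized {MemoryStart} field):
//   __ LoadFromInstance(
//       dst, ObjectAccess::ToTagged(WasmInstanceObject::kMemoryStartOffset),
//       assert_field_size<WASM_INSTANCE_OBJECT_SIZE(MemoryStart),
//                         kPointerSize>::size);
// The {assert_field_size} instantiation is a compile-time size check only and
// generates no code.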
#if V8_TARGET_ARCH_ARM64
// On ARM64, the Assembler keeps track of pointers to Labels to resolve
// branches to distant targets. Moving labels would confuse the Assembler,
// thus store the label on the heap and keep a unique_ptr.
class MovableLabel {
 public:
  MOVE_ONLY_NO_DEFAULT_CONSTRUCTOR(MovableLabel);
  MovableLabel() : label_(new Label()) {}

  Label* get() { return label_.get(); }

 private:
  std::unique_ptr<Label> label_;
};
#else
// On all other platforms, the Label can be stored (and moved) directly.
class MovableLabel {
 public:
  MOVE_ONLY_WITH_DEFAULT_CONSTRUCTORS(MovableLabel);

  Label* get() { return &label_; }

 private:
  Label label_;
};
#endif
compiler::CallDescriptor* GetLoweredCallDescriptor(
    Zone* zone, compiler::CallDescriptor* call_desc) {
  return kPointerSize == 4
             ? compiler::GetI32WasmCallDescriptor(zone, call_desc)
             : call_desc;
}

constexpr ValueType kSupportedTypesArr[] = {kWasmI32, kWasmI64, kWasmF32,
                                            kWasmF64};
constexpr Vector<const ValueType> kSupportedTypes =
    ArrayVector(kSupportedTypesArr);
class LiftoffCompiler {
 public:
  static constexpr Decoder::ValidateFlag validate = Decoder::kValidate;

  using Value = ValueBase;

  struct ElseState {
    MovableLabel label;
    LiftoffAssembler::CacheState state;
  };

  struct Control : public ControlWithNamedConstructors<Control, Value> {
    MOVE_ONLY_WITH_DEFAULT_CONSTRUCTORS(Control);

    std::unique_ptr<ElseState> else_state;
    LiftoffAssembler::CacheState label_state;
    MovableLabel label;
  };

  using FullDecoder = WasmFullDecoder<validate, LiftoffCompiler>;

  struct OutOfLineCode {
    MovableLabel label;
    MovableLabel continuation;
    WasmCode::RuntimeStubId stub;
    WasmCodePosition position;
    LiftoffRegList regs_to_save;
    uint32_t pc;  // for trap handler

    // Named constructors:
    static OutOfLineCode Trap(WasmCode::RuntimeStubId s, WasmCodePosition pos,
                              uint32_t pc) {
      DCHECK_LT(0, pos);
      return {{}, {}, s, pos, {}, pc};
    }
    static OutOfLineCode StackCheck(WasmCodePosition pos, LiftoffRegList regs) {
      return {{}, {}, WasmCode::kWasmStackGuard, pos, regs, 0};
    }
  };
  LiftoffCompiler(compiler::CallDescriptor* call_descriptor,
                  CompilationEnv* env, Zone* compilation_zone)
      : descriptor_(
            GetLoweredCallDescriptor(compilation_zone, call_descriptor)),
        env_(env),
        compilation_zone_(compilation_zone),
        safepoint_table_builder_(compilation_zone_) {}

  ~LiftoffCompiler() { BindUnboundLabels(nullptr); }
  bool ok() const { return ok_; }

  void GetCode(CodeDesc* desc) { asm_.GetCode(nullptr, desc); }

  OwnedVector<uint8_t> GetSourcePositionTable() {
    return source_position_table_builder_.ToSourcePositionTableVector();
  }

  OwnedVector<trap_handler::ProtectedInstructionData> GetProtectedInstructions()
      const {
    return OwnedVector<trap_handler::ProtectedInstructionData>::Of(
        protected_instructions_);
  }

  uint32_t GetTotalFrameSlotCount() const {
    return __ GetTotalFrameSlotCount();
  }
  void unsupported(FullDecoder* decoder, const char* reason) {
    ok_ = false;
    TRACE("unsupported: %s\n", reason);
    decoder->errorf(decoder->pc(), "unsupported liftoff operation: %s",
                    reason);
    BindUnboundLabels(decoder);
  }
  bool DidAssemblerBailout(FullDecoder* decoder) {
    if (decoder->failed() || !__ did_bailout()) return false;
    unsupported(decoder, __ bailout_reason());
    return true;
  }
  bool CheckSupportedType(FullDecoder* decoder,
                          Vector<const ValueType> supported_types,
                          ValueType type, const char* context) {
    char buffer[128];
    // Check if {type} is in the list of {supported_types}.
    for (ValueType supported : supported_types) {
      if (type == supported) return true;
    }
    SNPrintF(ArrayVector(buffer), "%s %s", ValueTypes::TypeName(type),
             context);
    unsupported(decoder, buffer);
    return false;
  }
  int GetSafepointTableOffset() const {
    return safepoint_table_builder_.GetCodeOffset();
  }
  void BindUnboundLabels(FullDecoder* decoder) {
#ifdef DEBUG
    // Bind all labels now, otherwise their destructor will fire a DCHECK
    // error if they are not bound. This can be called from the destructor,
    // where {decoder} might be null.
    uint32_t control_depth = decoder ? decoder->control_depth() : 0;
    for (uint32_t i = 0; i < control_depth; ++i) {
      Control* c = decoder->control_at(i);
      Label* label = c->label.get();
      if (!label->is_bound()) __ bind(label);
      if (c->else_state) {
        Label* else_label = c->else_state->label.get();
        if (!else_label->is_bound()) __ bind(else_label);
      }
    }
    for (auto& ool : out_of_line_code_) {
      if (!ool.label.get()->is_bound()) __ bind(ool.label.get());
    }
#endif
  }
  void StartFunction(FullDecoder* decoder) {
    int num_locals = decoder->NumLocals();
    __ set_num_locals(num_locals);
    for (int i = 0; i < num_locals; ++i) {
      __ set_local_type(i, decoder->GetLocalType(i));
    }
  }
  // Process a parameter and return the number of call-descriptor inputs it
  // consumed (two for an i64 lowered to a register pair, one otherwise).
  uint32_t ProcessParameter(ValueType type, uint32_t input_idx) {
    const int num_lowered_params = 1 + needs_reg_pair(type);
    ValueType lowered_type = needs_reg_pair(type) ? kWasmI32 : type;
    RegClass rc = reg_class_for(lowered_type);
    // Initialize to anything; it is set in the loop and used afterwards.
    LiftoffRegister reg = kGpCacheRegList.GetFirstRegSet();
    LiftoffRegList pinned;
    for (int pair_idx = 0; pair_idx < num_lowered_params; ++pair_idx) {
      compiler::LinkageLocation param_loc =
          descriptor_->GetInputLocation(input_idx + pair_idx);
      // Initialize to anything; it is set in both arms of the if below.
      LiftoffRegister in_reg = kGpCacheRegList.GetFirstRegSet();
      if (param_loc.IsRegister()) {
        DCHECK(!param_loc.IsAnyRegister());
        int reg_code = param_loc.AsRegister();
#if V8_TARGET_ARCH_ARM
        // Liftoff assumes a one-to-one mapping between float and double
        // registers; halve the f32 register code to get the d-register code.
        DCHECK_IMPLIES(type == kWasmF32, (reg_code % 2) == 0);
        if (type == kWasmF32) {
          reg_code /= 2;
        }
#endif
        RegList cache_regs = rc == kGpReg ? kLiftoffAssemblerGpCacheRegs
                                          : kLiftoffAssemblerFpCacheRegs;
        if (cache_regs & (1ULL << reg_code)) {
          // This is a cache register, just use it.
          in_reg = LiftoffRegister::from_code(rc, reg_code);
        } else {
          // Move it to a cache register.
          in_reg = __ GetUnusedRegister(rc, pinned);
          if (rc == kGpReg) {
            __ Move(in_reg.gp(), Register::from_code(reg_code), lowered_type);
          } else {
            __ Move(in_reg.fp(), DoubleRegister::from_code(reg_code),
                    lowered_type);
          }
        }
      } else if (param_loc.IsCallerFrameSlot()) {
        in_reg = __ GetUnusedRegister(rc, pinned);
        __ LoadCallerFrameSlot(in_reg, -param_loc.AsCallerFrameSlot(),
                               lowered_type);
      }
      reg = pair_idx == 0 ? in_reg
                          : LiftoffRegister::ForPair(reg.gp(), in_reg.gp());
      pinned.set(reg);
    }
    __ PushRegister(type, reg);
    return num_lowered_params;
  }
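  // Example of the lowering above: an i64 parameter on a 32-bit target (where
  // {needs_reg_pair(kWasmI64)} holds) consumes two lowered i32 inputs. Each
  // half is materialized in a gp cache register, and the two halves are then
  // pushed as a single value, roughly:
  //   reg = LiftoffRegister::ForPair(first_half.gp(), second_half.gp());
  //   __ PushRegister(kWasmI64, reg);
  // On 64-bit targets, {num_lowered_params} is 1 and the pair logic is
  // skipped entirely.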
  void StackCheck(WasmCodePosition position) {
    if (FLAG_wasm_no_stack_checks || !env_->runtime_exception_support) return;
    out_of_line_code_.push_back(
        OutOfLineCode::StackCheck(position, __ cache_state()->used_registers));
    OutOfLineCode& ool = out_of_line_code_.back();
    Register limit_address = __ GetUnusedRegister(kGpReg).gp();
    LOAD_INSTANCE_FIELD(limit_address, StackLimitAddress, kPointerSize);
    __ StackCheck(ool.label.get(), limit_address);
    __ bind(ool.continuation.get());
  }
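  // The stack check uses the usual out-of-line pattern: the inline fast path
  // is one load plus one compare-and-branch against the limit stored at
  // {instance.StackLimitAddress}; the runtime call is emitted later by
  // {GenerateOutOfLineCode} and jumps back to {continuation}. In sketch form:
  //   limit = [instance.StackLimitAddress]
  //   if (sp < limit) goto ool_stack_guard   // slow path at function end
  // continuation:
  //   ...                                    // function body continues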
  void StartFunctionBody(FullDecoder* decoder, Control* block) {
    for (uint32_t i = 0; i < __ num_locals(); ++i) {
      if (!CheckSupportedType(decoder, kSupportedTypes, __ local_type(i),
                              "param"))
        return;
    }

    // Input 0 is the call target, the instance is at 1.
    constexpr int kInstanceParameterIndex = 1;
    // Check that {kWasmInstanceRegister} matches the call descriptor.
    compiler::LinkageLocation instance_loc =
        descriptor_->GetInputLocation(kInstanceParameterIndex);
    DCHECK(instance_loc.IsRegister());
    DCHECK(!instance_loc.IsAnyRegister());
    Register instance_reg = Register::from_code(instance_loc.AsRegister());
    DCHECK_EQ(kWasmInstanceRegister, instance_reg);

    uint32_t num_params =
        static_cast<uint32_t>(decoder->sig_->parameter_count());

    __ EnterFrame(StackFrame::WASM_COMPILED);
    __ set_has_frame(true);
    pc_offset_stack_frame_construction_ = __ PrepareStackFrame();
    // {PrepareStackFrame} is the first platform-specific instruction emitted;
    // it may have triggered a bailout.
    if (DidAssemblerBailout(decoder)) return;

    __ SpillInstance(instance_reg);
    // Input 0 is the code target, 1 is the instance. First parameter at 2.
    uint32_t input_idx = kInstanceParameterIndex + 1;
    for (uint32_t param_idx = 0; param_idx < num_params; ++param_idx) {
      input_idx += ProcessParameter(__ local_type(param_idx), input_idx);
    }
    DCHECK_EQ(input_idx, descriptor_->InputCount());
    // Set this to a gp register to mark it uninitialized.
    LiftoffRegister zero_double_reg = kGpCacheRegList.GetFirstRegSet();
    DCHECK(zero_double_reg.is_gp());
    for (uint32_t param_idx = num_params; param_idx < __ num_locals();
         ++param_idx) {
      ValueType type = decoder->GetLocalType(param_idx);
      switch (type) {
        case kWasmI32:
          __ cache_state()->stack_state.emplace_back(kWasmI32, uint32_t{0});
          break;
        case kWasmI64:
          __ cache_state()->stack_state.emplace_back(kWasmI64, uint32_t{0});
          break;
        case kWasmF32:
        case kWasmF64:
          if (zero_double_reg.is_gp()) {
            // Note: This might spill one of the registers used to hold
            // parameters.
            zero_double_reg = __ GetUnusedRegister(kFpReg);
            // Zero is represented by the bit pattern 0 for both f32 and f64.
            __ LoadConstant(zero_double_reg, WasmValue(0.));
          }
          __ PushRegister(type, zero_double_reg);
          break;
        default:
          UNIMPLEMENTED();
      }
    }
    block->label_state.stack_base = __ num_locals();

    // The function-prologue stack check is associated with position 0.
    StackCheck(0);

    DCHECK_EQ(__ num_locals(), __ cache_state()->stack_height());
  }
  void GenerateOutOfLineCode(OutOfLineCode& ool) {
    __ bind(ool.label.get());
    const bool is_stack_check = ool.stub == WasmCode::kWasmStackGuard;
    const bool is_mem_out_of_bounds =
        ool.stub == WasmCode::kThrowWasmTrapMemOutOfBounds;

    if (is_mem_out_of_bounds && env_->use_trap_handler) {
      uint32_t pc = static_cast<uint32_t>(__ pc_offset());
      DCHECK_EQ(pc, __ pc_offset());
      protected_instructions_.emplace_back(
          trap_handler::ProtectedInstructionData{ool.pc, pc});
    }

    if (!env_->runtime_exception_support) {
      // We cannot test calls to the runtime in cctest/test-run-wasm.
      // Therefore we emit a call to C here instead of a call to the runtime.
      // In this mode, we never generate stack checks.
      DCHECK(!is_stack_check);
      __ CallTrapCallbackForTesting();
      __ LeaveFrame(StackFrame::WASM_COMPILED);
      __ DropStackSlotsAndRet(
          static_cast<uint32_t>(descriptor_->StackParameterCount()));
      return;
    }

    if (!ool.regs_to_save.is_empty()) __ PushRegisters(ool.regs_to_save);

    source_position_table_builder_.AddPosition(
        __ pc_offset(), SourcePosition(ool.position), false);
    __ CallRuntimeStub(ool.stub);
    safepoint_table_builder_.DefineSafepoint(&asm_, Safepoint::kSimple, 0,
                                             Safepoint::kNoLazyDeopt);
    DCHECK_EQ(ool.continuation.get()->is_bound(), is_stack_check);
    if (!ool.regs_to_save.is_empty()) __ PopRegisters(ool.regs_to_save);
    if (is_stack_check) {
      __ emit_jump(ool.continuation.get());
    } else {
      __ AssertUnreachable(AbortReason::kUnexpectedReturnFromWasmTrap);
    }
  }
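  // Note on the trap-handler path above: with {env_->use_trap_handler}, no
  // explicit bounds check precedes a memory access. Instead {ool.pc} is the
  // offset of the potentially faulting load/store and the pc bound here is
  // the landing pad, recorded (in sketch form) as:
  //   ProtectedInstructionData{instr_offset = ool.pc, landing_offset = pc}
  // so the signal handler can resume execution at this out-of-line trap code
  // when the access faults.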
  void FinishFunction(FullDecoder* decoder) {
    if (DidAssemblerBailout(decoder)) return;
    for (OutOfLineCode& ool : out_of_line_code_) {
      GenerateOutOfLineCode(ool);
    }
    __ PatchPrepareStackFrame(pc_offset_stack_frame_construction_,
                              __ GetTotalFrameSlotCount());
    __ FinishCode();
    safepoint_table_builder_.Emit(&asm_, __ GetTotalFrameSlotCount());
    // The previous calls may have also generated a bailout.
    DidAssemblerBailout(decoder);
  }
  void OnFirstError(FullDecoder* decoder) {
    ok_ = false;
    BindUnboundLabels(decoder);
    asm_.AbortCompilation();
  }
  void NextInstruction(FullDecoder* decoder, WasmOpcode opcode) {
    TraceCacheState(decoder);
    SLOW_DCHECK(__ ValidateCacheState());
    DEBUG_CODE_COMMENT(WasmOpcodes::OpcodeName(opcode));
  }
  void Block(FullDecoder* decoder, Control* block) {
    block->label_state.stack_base = __ cache_state()->stack_height();
  }

  void Loop(FullDecoder* decoder, Control* loop) {
    loop->label_state.stack_base = __ cache_state()->stack_height();

    // Before entering a loop, spill all locals to the stack, in order to free
    // the cache registers, and to ensure that all locals have the same state
    // on each loop iteration.
    __ SpillLocals();

    // Loop labels bind at the beginning of the block.
    __ bind(loop->label.get());

    // Save the current cache state for the merge when jumping to this loop.
    loop->label_state.Split(*__ cache_state());

    // Execute a stack check in the loop header.
    StackCheck(decoder->position());
  }
  void Try(FullDecoder* decoder, Control* block) {
    unsupported(decoder, "try");
  }

  void If(FullDecoder* decoder, const Value& cond, Control* if_block) {
    DCHECK_EQ(if_block, decoder->control_at(0));
    DCHECK(if_block->is_if());

    if (if_block->start_merge.arity > 0 || if_block->end_merge.arity > 1)
      return unsupported(decoder, "multi-value if");

    // Allocate the else state.
    if_block->else_state = base::make_unique<ElseState>();

    // Test the condition, jump to else if zero.
    Register value = __ PopToRegister().gp();
    __ emit_cond_jump(kEqual, if_block->else_state->label.get(), kWasmI32,
                      value);

    // Store the state (after popping the value) for executing the else
    // branch.
    if_block->label_state.stack_base = __ cache_state()->stack_height();
    if_block->else_state->state.Split(*__ cache_state());
  }
  void FallThruTo(FullDecoder* decoder, Control* c) {
    if (c->end_merge.reached) {
      __ MergeFullStackWith(c->label_state);
    } else if (c->is_onearmed_if()) {
      c->label_state.InitMerge(*__ cache_state(), __ num_locals(),
                               c->br_merge()->arity);
      __ MergeFullStackWith(c->label_state);
    } else {
      c->label_state.Split(*__ cache_state());
    }
    TraceCacheState(decoder);
  }
  void PopControl(FullDecoder* decoder, Control* c) {
    if (!c->is_loop() && c->end_merge.reached) {
      __ cache_state()->Steal(c->label_state);
    }
    if (!c->label.get()->is_bound()) {
      __ bind(c->label.get());
    }
  }

  void EndControl(FullDecoder* decoder, Control* c) {}
  enum CCallReturn : bool { kHasReturn = true, kNoReturn = false };

  void GenerateCCall(const LiftoffRegister* result_regs, FunctionSig* sig,
                     ValueType out_argument_type,
                     const LiftoffRegister* arg_regs,
                     ExternalReference ext_ref) {
    // Before making a call, spill all cache registers.
    __ SpillAllRegisters();

    // Store arguments on our stack, then align the stack for calling to C.
    int param_bytes = 0;
    for (ValueType param_type : sig->parameters()) {
      param_bytes += ValueTypes::MemSize(param_type);
    }
    int out_arg_bytes = out_argument_type == kWasmStmt
                            ? 0
                            : ValueTypes::MemSize(out_argument_type);
    int stack_bytes = std::max(param_bytes, out_arg_bytes);
    __ CallC(sig, arg_regs, result_regs, out_argument_type, stack_bytes,
             ext_ref);
  }
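  // Worked example of the stack-byte computation: for a helper with wasm
  // signature (f32) -> i32 that also produces an i64 through the out
  // argument, {param_bytes} is 4 and {out_arg_bytes} is 8, so
  // {stack_bytes} = max(4, 8) = 8. Parameters and the out argument share the
  // same stack area (the callee reads the parameters before the result is
  // written back), which is why the maximum rather than the sum is reserved.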
  template <ValueType src_type, ValueType result_type, class EmitFn>
  void EmitUnOp(EmitFn fn) {
    static RegClass src_rc = reg_class_for(src_type);
    static RegClass result_rc = reg_class_for(result_type);
    LiftoffRegister src = __ PopToRegister();
    LiftoffRegister dst = src_rc == result_rc
                              ? __ GetUnusedRegister(result_rc, {src})
                              : __ GetUnusedRegister(result_rc);
    fn(dst, src);
    __ PushRegister(result_type, dst);
  }
  void EmitI32UnOpWithCFallback(bool (LiftoffAssembler::*emit_fn)(Register,
                                                                  Register),
                                ExternalReference (*fallback_fn)()) {
    auto emit_with_c_fallback = [=](LiftoffRegister dst, LiftoffRegister src) {
      if (emit_fn && (asm_.*emit_fn)(dst.gp(), src.gp())) return;
      ExternalReference ext_ref = fallback_fn();
      ValueType sig_i_i_reps[] = {kWasmI32, kWasmI32};
      FunctionSig sig_i_i(1, 1, sig_i_i_reps);
      GenerateCCall(&dst, &sig_i_i, kWasmStmt, &src, ext_ref);
    };
    EmitUnOp<kWasmI32, kWasmI32>(emit_with_c_fallback);
  }
  template <ValueType type>
  void EmitFloatUnOpWithCFallback(
      bool (LiftoffAssembler::*emit_fn)(DoubleRegister, DoubleRegister),
      ExternalReference (*fallback_fn)()) {
    auto emit_with_c_fallback = [=](LiftoffRegister dst, LiftoffRegister src) {
      if ((asm_.*emit_fn)(dst.fp(), src.fp())) return;
      ExternalReference ext_ref = fallback_fn();
      ValueType sig_reps[] = {type};
      FunctionSig sig(0, 1, sig_reps);
      GenerateCCall(&dst, &sig, type, &src, ext_ref);
    };
    EmitUnOp<type, type>(emit_with_c_fallback);
  }
  enum TypeConversionTrapping : bool { kCanTrap = true, kNoTrap = false };
  template <ValueType dst_type, ValueType src_type,
            TypeConversionTrapping can_trap>
  void EmitTypeConversion(WasmOpcode opcode, ExternalReference (*fallback_fn)(),
                          WasmCodePosition trap_position) {
    static constexpr RegClass src_rc = reg_class_for(src_type);
    static constexpr RegClass dst_rc = reg_class_for(dst_type);
    LiftoffRegister src = __ PopToRegister();
    LiftoffRegister dst = src_rc == dst_rc ? __ GetUnusedRegister(dst_rc, {src})
                                           : __ GetUnusedRegister(dst_rc);
    DCHECK_EQ(!!can_trap, trap_position > 0);
    Label* trap = can_trap ? AddOutOfLineTrap(
                                 trap_position,
                                 WasmCode::kThrowWasmTrapFloatUnrepresentable)
                           : nullptr;
    if (!__ emit_type_conversion(opcode, dst, src, trap)) {
      DCHECK_NOT_NULL(fallback_fn);
      ExternalReference ext_ref = fallback_fn();
      if (can_trap) {
        // The C function returns an int which indicates whether the
        // conversion succeeded.
        ValueType sig_reps[] = {kWasmI32, src_type};
        FunctionSig sig(1, 1, sig_reps);
        LiftoffRegister ret_reg =
            __ GetUnusedRegister(kGpReg, LiftoffRegList::ForRegs(dst));
        LiftoffRegister dst_regs[] = {ret_reg, dst};
        GenerateCCall(dst_regs, &sig, dst_type, &src, ext_ref);
        __ emit_cond_jump(kEqual, trap, kWasmI32, ret_reg.gp());
      } else {
        ValueType sig_reps[] = {src_type};
        FunctionSig sig(0, 1, sig_reps);
        GenerateCCall(&dst, &sig, dst_type, &src, ext_ref);
      }
    }
    __ PushRegister(dst_type, dst);
  }
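  // The C fallback for trapping conversions follows a small protocol, matched
  // by the code above: the helper (e.g. wasm_float32_to_int64) writes the
  // converted value through the out-argument and returns a nonzero i32 iff
  // the input was representable. In sketch form:
  //   ret = helper(&stack_slot);  // src read from, dst written to, the
  //                               // shared stack area
  //   if (ret == 0) goto ool_trap_float_unrepresentable;
  // Non-trapping conversions use a helper without return value and skip the
  // check entirely.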
  void UnOp(FullDecoder* decoder, WasmOpcode opcode, FunctionSig*,
            const Value& value, Value* result) {
#define CASE_I32_UNOP(opcode, fn)                       \
  case WasmOpcode::kExpr##opcode:                       \
    EmitUnOp<kWasmI32, kWasmI32>(                       \
        [=](LiftoffRegister dst, LiftoffRegister src) { \
          __ emit_##fn(dst.gp(), src.gp());             \
        });                                             \
    break;
#define CASE_I32_SIGN_EXTENSION(opcode, fn)             \
  case WasmOpcode::kExpr##opcode:                       \
    EmitUnOp<kWasmI32, kWasmI32>(                       \
        [=](LiftoffRegister dst, LiftoffRegister src) { \
          __ emit_##fn(dst.gp(), src.gp());             \
        });                                             \
    break;
#define CASE_I64_SIGN_EXTENSION(opcode, fn)             \
  case WasmOpcode::kExpr##opcode:                       \
    EmitUnOp<kWasmI64, kWasmI64>(                       \
        [=](LiftoffRegister dst, LiftoffRegister src) { \
          __ emit_##fn(dst, src);                       \
        });                                             \
    break;
#define CASE_FLOAT_UNOP(opcode, type, fn)               \
  case WasmOpcode::kExpr##opcode:                       \
    EmitUnOp<kWasm##type, kWasm##type>(                 \
        [=](LiftoffRegister dst, LiftoffRegister src) { \
          __ emit_##fn(dst.fp(), src.fp());             \
        });                                             \
    break;
#define CASE_FLOAT_UNOP_WITH_CFALLBACK(opcode, type, fn)                    \
  case WasmOpcode::kExpr##opcode:                                           \
    EmitFloatUnOpWithCFallback<kWasm##type>(&LiftoffAssembler::emit_##fn,   \
                                            &ExternalReference::wasm_##fn); \
    break;
#define CASE_TYPE_CONVERSION(opcode, dst_type, src_type, ext_ref, can_trap) \
  case WasmOpcode::kExpr##opcode:                                           \
    EmitTypeConversion<kWasm##dst_type, kWasm##src_type, can_trap>(         \
        kExpr##opcode, ext_ref, can_trap ? decoder->position() : 0);        \
    break;
    switch (opcode) {
      CASE_I32_UNOP(I32Eqz, i32_eqz)
      CASE_I32_UNOP(I32Clz, i32_clz)
      CASE_I32_UNOP(I32Ctz, i32_ctz)
      CASE_FLOAT_UNOP(F32Abs, F32, f32_abs)
      CASE_FLOAT_UNOP(F32Neg, F32, f32_neg)
      CASE_FLOAT_UNOP_WITH_CFALLBACK(F32Ceil, F32, f32_ceil)
      CASE_FLOAT_UNOP_WITH_CFALLBACK(F32Floor, F32, f32_floor)
      CASE_FLOAT_UNOP_WITH_CFALLBACK(F32Trunc, F32, f32_trunc)
      CASE_FLOAT_UNOP_WITH_CFALLBACK(F32NearestInt, F32, f32_nearest_int)
      CASE_FLOAT_UNOP(F32Sqrt, F32, f32_sqrt)
      CASE_FLOAT_UNOP(F64Abs, F64, f64_abs)
      CASE_FLOAT_UNOP(F64Neg, F64, f64_neg)
      CASE_FLOAT_UNOP_WITH_CFALLBACK(F64Ceil, F64, f64_ceil)
      CASE_FLOAT_UNOP_WITH_CFALLBACK(F64Floor, F64, f64_floor)
      CASE_FLOAT_UNOP_WITH_CFALLBACK(F64Trunc, F64, f64_trunc)
      CASE_FLOAT_UNOP_WITH_CFALLBACK(F64NearestInt, F64, f64_nearest_int)
      CASE_FLOAT_UNOP(F64Sqrt, F64, f64_sqrt)
      CASE_TYPE_CONVERSION(I32ConvertI64, I32, I64, nullptr, kNoTrap)
      CASE_TYPE_CONVERSION(I32SConvertF32, I32, F32, nullptr, kCanTrap)
      CASE_TYPE_CONVERSION(I32UConvertF32, I32, F32, nullptr, kCanTrap)
      CASE_TYPE_CONVERSION(I32SConvertF64, I32, F64, nullptr, kCanTrap)
      CASE_TYPE_CONVERSION(I32UConvertF64, I32, F64, nullptr, kCanTrap)
      CASE_TYPE_CONVERSION(I32ReinterpretF32, I32, F32, nullptr, kNoTrap)
      CASE_TYPE_CONVERSION(I64SConvertI32, I64, I32, nullptr, kNoTrap)
      CASE_TYPE_CONVERSION(I64UConvertI32, I64, I32, nullptr, kNoTrap)
      CASE_TYPE_CONVERSION(I64SConvertF32, I64, F32,
                           &ExternalReference::wasm_float32_to_int64, kCanTrap)
      CASE_TYPE_CONVERSION(I64UConvertF32, I64, F32,
                           &ExternalReference::wasm_float32_to_uint64, kCanTrap)
      CASE_TYPE_CONVERSION(I64SConvertF64, I64, F64,
                           &ExternalReference::wasm_float64_to_int64, kCanTrap)
      CASE_TYPE_CONVERSION(I64UConvertF64, I64, F64,
                           &ExternalReference::wasm_float64_to_uint64, kCanTrap)
      CASE_TYPE_CONVERSION(I64ReinterpretF64, I64, F64, nullptr, kNoTrap)
      CASE_TYPE_CONVERSION(F32SConvertI32, F32, I32, nullptr, kNoTrap)
      CASE_TYPE_CONVERSION(F32UConvertI32, F32, I32, nullptr, kNoTrap)
      CASE_TYPE_CONVERSION(F32SConvertI64, F32, I64,
                           &ExternalReference::wasm_int64_to_float32, kNoTrap)
      CASE_TYPE_CONVERSION(F32UConvertI64, F32, I64,
                           &ExternalReference::wasm_uint64_to_float32, kNoTrap)
      CASE_TYPE_CONVERSION(F32ConvertF64, F32, F64, nullptr, kNoTrap)
      CASE_TYPE_CONVERSION(F32ReinterpretI32, F32, I32, nullptr, kNoTrap)
      CASE_TYPE_CONVERSION(F64SConvertI32, F64, I32, nullptr, kNoTrap)
      CASE_TYPE_CONVERSION(F64UConvertI32, F64, I32, nullptr, kNoTrap)
      CASE_TYPE_CONVERSION(F64SConvertI64, F64, I64,
                           &ExternalReference::wasm_int64_to_float64, kNoTrap)
      CASE_TYPE_CONVERSION(F64UConvertI64, F64, I64,
                           &ExternalReference::wasm_uint64_to_float64, kNoTrap)
      CASE_TYPE_CONVERSION(F64ConvertF32, F64, F32, nullptr, kNoTrap)
      CASE_TYPE_CONVERSION(F64ReinterpretI64, F64, I64, nullptr, kNoTrap)
      CASE_I32_SIGN_EXTENSION(I32SExtendI8, i32_signextend_i8)
      CASE_I32_SIGN_EXTENSION(I32SExtendI16, i32_signextend_i16)
      CASE_I64_SIGN_EXTENSION(I64SExtendI8, i64_signextend_i8)
      CASE_I64_SIGN_EXTENSION(I64SExtendI16, i64_signextend_i16)
      CASE_I64_SIGN_EXTENSION(I64SExtendI32, i64_signextend_i32)
      case WasmOpcode::kExprI32Popcnt:
        EmitI32UnOpWithCFallback(&LiftoffAssembler::emit_i32_popcnt,
                                 &ExternalReference::wasm_word32_popcnt);
        break;
      case WasmOpcode::kExprI64Eqz:
        EmitUnOp<kWasmI64, kWasmI32>(
            [=](LiftoffRegister dst, LiftoffRegister src) {
              __ emit_i64_eqz(dst.gp(), src);
            });
        break;
      default:
        return unsupported(decoder, WasmOpcodes::OpcodeName(opcode));
    }
#undef CASE_I32_UNOP
#undef CASE_I32_SIGN_EXTENSION
#undef CASE_I64_SIGN_EXTENSION
#undef CASE_FLOAT_UNOP
#undef CASE_FLOAT_UNOP_WITH_CFALLBACK
#undef CASE_TYPE_CONVERSION
  }

  template <ValueType src_type, ValueType result_type, typename EmitFn>
  void EmitBinOp(EmitFn fn) {
    static constexpr RegClass src_rc = reg_class_for(src_type);
    static constexpr RegClass result_rc = reg_class_for(result_type);
    LiftoffRegister rhs = __ PopToRegister();
    LiftoffRegister lhs = __ PopToRegister(LiftoffRegList::ForRegs(rhs));
    LiftoffRegister dst = src_rc == result_rc
                              ? __ GetUnusedRegister(result_rc, {lhs, rhs})
                              : __ GetUnusedRegister(result_rc);
    fn(dst, lhs, rhs);
    __ PushRegister(result_type, dst);
  }
  void EmitDivOrRem64CCall(LiftoffRegister dst, LiftoffRegister lhs,
                           LiftoffRegister rhs, ExternalReference ext_ref,
                           Label* trap_by_zero,
                           Label* trap_unrepresentable = nullptr) {
    // Cannot emit native instructions, build a C call.
    LiftoffRegister ret =
        __ GetUnusedRegister(kGpReg, LiftoffRegList::ForRegs(dst));
    LiftoffRegister tmp =
        __ GetUnusedRegister(kGpReg, LiftoffRegList::ForRegs(dst, ret));
    LiftoffRegister arg_regs[] = {lhs, rhs};
    LiftoffRegister result_regs[] = {ret, dst};
    ValueType sig_types[] = {kWasmI32, kWasmI64, kWasmI64};
    // <i64, i64> -> i32 (with i64 output argument)
    FunctionSig sig(1, 2, sig_types);
    GenerateCCall(result_regs, &sig, kWasmI64, arg_regs, ext_ref);
    __ LoadConstant(tmp, WasmValue(int32_t{0}));
    __ emit_cond_jump(kEqual, trap_by_zero, kWasmI32, ret.gp(), tmp.gp());
    if (trap_unrepresentable) {
      __ LoadConstant(tmp, WasmValue(int32_t{-1}));
      __ emit_cond_jump(kEqual, trap_unrepresentable, kWasmI32, ret.gp(),
                        tmp.gp());
    }
  }
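  // The 64-bit div/rem helpers (wasm_int64_div, wasm_uint64_mod, ...) report
  // failure through their i32 return value, which the two compares above
  // decode: 0 means division by zero, and -1 (for signed division) means the
  // result is unrepresentable (kMinInt64 / -1). The actual quotient or
  // remainder comes back through the i64 out-argument in {dst}.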
  void BinOp(FullDecoder* decoder, WasmOpcode opcode, FunctionSig*,
             const Value& lhs, const Value& rhs, Value* result) {
#define CASE_I32_BINOP(opcode, fn)                                           \
  case WasmOpcode::kExpr##opcode:                                            \
    return EmitBinOp<kWasmI32, kWasmI32>(                                    \
        [=](LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs) { \
          __ emit_##fn(dst.gp(), lhs.gp(), rhs.gp());                        \
        });
#define CASE_I64_BINOP(opcode, fn)                                           \
  case WasmOpcode::kExpr##opcode:                                            \
    return EmitBinOp<kWasmI64, kWasmI64>(                                    \
        [=](LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs) { \
          __ emit_##fn(dst, lhs, rhs);                                       \
        });
#define CASE_FLOAT_BINOP(opcode, type, fn)                                   \
  case WasmOpcode::kExpr##opcode:                                            \
    return EmitBinOp<kWasm##type, kWasm##type>(                              \
        [=](LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs) { \
          __ emit_##fn(dst.fp(), lhs.fp(), rhs.fp());                        \
        });
#define CASE_I32_CMPOP(opcode, cond)                                         \
  case WasmOpcode::kExpr##opcode:                                            \
    return EmitBinOp<kWasmI32, kWasmI32>(                                    \
        [=](LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs) { \
          __ emit_i32_set_cond(cond, dst.gp(), lhs.gp(), rhs.gp());          \
        });
#define CASE_I64_CMPOP(opcode, cond)                                         \
  case WasmOpcode::kExpr##opcode:                                            \
    return EmitBinOp<kWasmI64, kWasmI32>(                                    \
        [=](LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs) { \
          __ emit_i64_set_cond(cond, dst.gp(), lhs, rhs);                    \
        });
#define CASE_F32_CMPOP(opcode, cond)                                         \
  case WasmOpcode::kExpr##opcode:                                            \
    return EmitBinOp<kWasmF32, kWasmI32>(                                    \
        [=](LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs) { \
          __ emit_f32_set_cond(cond, dst.gp(), lhs.fp(), rhs.fp());          \
        });
#define CASE_F64_CMPOP(opcode, cond)                                         \
  case WasmOpcode::kExpr##opcode:                                            \
    return EmitBinOp<kWasmF64, kWasmI32>(                                    \
        [=](LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs) { \
          __ emit_f64_set_cond(cond, dst.gp(), lhs.fp(), rhs.fp());          \
        });
#define CASE_I32_SHIFTOP(opcode, fn)                                         \
  case WasmOpcode::kExpr##opcode:                                            \
    return EmitBinOp<kWasmI32, kWasmI32>(                                    \
        [=](LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs) { \
          __ emit_##fn(dst.gp(), lhs.gp(), rhs.gp(), {});                    \
        });
#define CASE_I64_SHIFTOP(opcode, fn)                                           \
  case WasmOpcode::kExpr##opcode:                                              \
    return EmitBinOp<kWasmI64, kWasmI64>([=](LiftoffRegister dst,              \
                                             LiftoffRegister src,              \
                                             LiftoffRegister amount) {         \
      __ emit_##fn(dst, src, amount.is_pair() ? amount.low_gp() : amount.gp(), \
                   {});                                                        \
    });
#define CASE_CCALL_BINOP(opcode, type, ext_ref_fn)                           \
  case WasmOpcode::kExpr##opcode:                                            \
    return EmitBinOp<kWasmI32, kWasmI32>(                                    \
        [=](LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs) { \
          LiftoffRegister args[] = {lhs, rhs};                               \
          auto ext_ref = ExternalReference::ext_ref_fn();                    \
          ValueType sig_i_ii_reps[] = {kWasmI32, kWasmI32, kWasmI32};        \
          FunctionSig sig_i_ii(1, 2, sig_i_ii_reps);                         \
          GenerateCCall(&dst, &sig_i_ii, kWasmStmt, args, ext_ref);          \
        });
    switch (opcode) {
      CASE_I32_BINOP(I32Add, i32_add)
      CASE_I32_BINOP(I32Sub, i32_sub)
      CASE_I32_BINOP(I32Mul, i32_mul)
      CASE_I32_BINOP(I32And, i32_and)
      CASE_I32_BINOP(I32Ior, i32_or)
      CASE_I32_BINOP(I32Xor, i32_xor)
      CASE_I64_BINOP(I64And, i64_and)
      CASE_I64_BINOP(I64Ior, i64_or)
      CASE_I64_BINOP(I64Xor, i64_xor)
      CASE_I32_CMPOP(I32Eq, kEqual)
      CASE_I32_CMPOP(I32Ne, kUnequal)
      CASE_I32_CMPOP(I32LtS, kSignedLessThan)
      CASE_I32_CMPOP(I32LtU, kUnsignedLessThan)
      CASE_I32_CMPOP(I32GtS, kSignedGreaterThan)
      CASE_I32_CMPOP(I32GtU, kUnsignedGreaterThan)
      CASE_I32_CMPOP(I32LeS, kSignedLessEqual)
      CASE_I32_CMPOP(I32LeU, kUnsignedLessEqual)
      CASE_I32_CMPOP(I32GeS, kSignedGreaterEqual)
      CASE_I32_CMPOP(I32GeU, kUnsignedGreaterEqual)
      CASE_I64_BINOP(I64Add, i64_add)
      CASE_I64_BINOP(I64Sub, i64_sub)
      CASE_I64_BINOP(I64Mul, i64_mul)
      CASE_I64_CMPOP(I64Eq, kEqual)
      CASE_I64_CMPOP(I64Ne, kUnequal)
      CASE_I64_CMPOP(I64LtS, kSignedLessThan)
      CASE_I64_CMPOP(I64LtU, kUnsignedLessThan)
      CASE_I64_CMPOP(I64GtS, kSignedGreaterThan)
      CASE_I64_CMPOP(I64GtU, kUnsignedGreaterThan)
      CASE_I64_CMPOP(I64LeS, kSignedLessEqual)
      CASE_I64_CMPOP(I64LeU, kUnsignedLessEqual)
      CASE_I64_CMPOP(I64GeS, kSignedGreaterEqual)
      CASE_I64_CMPOP(I64GeU, kUnsignedGreaterEqual)
      CASE_F32_CMPOP(F32Eq, kEqual)
      CASE_F32_CMPOP(F32Ne, kUnequal)
      CASE_F32_CMPOP(F32Lt, kUnsignedLessThan)
      CASE_F32_CMPOP(F32Gt, kUnsignedGreaterThan)
      CASE_F32_CMPOP(F32Le, kUnsignedLessEqual)
      CASE_F32_CMPOP(F32Ge, kUnsignedGreaterEqual)
      CASE_F64_CMPOP(F64Eq, kEqual)
      CASE_F64_CMPOP(F64Ne, kUnequal)
      CASE_F64_CMPOP(F64Lt, kUnsignedLessThan)
      CASE_F64_CMPOP(F64Gt, kUnsignedGreaterThan)
      CASE_F64_CMPOP(F64Le, kUnsignedLessEqual)
      CASE_F64_CMPOP(F64Ge, kUnsignedGreaterEqual)
      CASE_I32_SHIFTOP(I32Shl, i32_shl)
      CASE_I32_SHIFTOP(I32ShrS, i32_sar)
      CASE_I32_SHIFTOP(I32ShrU, i32_shr)
      CASE_I64_SHIFTOP(I64Shl, i64_shl)
      CASE_I64_SHIFTOP(I64ShrS, i64_sar)
      CASE_I64_SHIFTOP(I64ShrU, i64_shr)
      CASE_CCALL_BINOP(I32Rol, I32, wasm_word32_rol)
      CASE_CCALL_BINOP(I32Ror, I32, wasm_word32_ror)
      CASE_FLOAT_BINOP(F32Add, F32, f32_add)
      CASE_FLOAT_BINOP(F32Sub, F32, f32_sub)
      CASE_FLOAT_BINOP(F32Mul, F32, f32_mul)
      CASE_FLOAT_BINOP(F32Div, F32, f32_div)
      CASE_FLOAT_BINOP(F32Min, F32, f32_min)
      CASE_FLOAT_BINOP(F32Max, F32, f32_max)
      CASE_FLOAT_BINOP(F32CopySign, F32, f32_copysign)
      CASE_FLOAT_BINOP(F64Add, F64, f64_add)
      CASE_FLOAT_BINOP(F64Sub, F64, f64_sub)
      CASE_FLOAT_BINOP(F64Mul, F64, f64_mul)
      CASE_FLOAT_BINOP(F64Div, F64, f64_div)
      CASE_FLOAT_BINOP(F64Min, F64, f64_min)
      CASE_FLOAT_BINOP(F64Max, F64, f64_max)
      CASE_FLOAT_BINOP(F64CopySign, F64, f64_copysign)
      case WasmOpcode::kExprI32DivS:
        EmitBinOp<kWasmI32, kWasmI32>([this, decoder](LiftoffRegister dst,
                                                      LiftoffRegister lhs,
                                                      LiftoffRegister rhs) {
          WasmCodePosition position = decoder->position();
          AddOutOfLineTrap(position, WasmCode::kThrowWasmTrapDivByZero);
          // Adding the second trap might invalidate the pointer returned for
          // the first one, thus get both pointers afterwards.
          AddOutOfLineTrap(position,
                           WasmCode::kThrowWasmTrapDivUnrepresentable);
          Label* div_by_zero = out_of_line_code_.end()[-2].label.get();
          Label* div_unrepresentable = out_of_line_code_.end()[-1].label.get();
          __ emit_i32_divs(dst.gp(), lhs.gp(), rhs.gp(), div_by_zero,
                           div_unrepresentable);
        });
        break;
      case WasmOpcode::kExprI32DivU:
        EmitBinOp<kWasmI32, kWasmI32>([this, decoder](LiftoffRegister dst,
                                                      LiftoffRegister lhs,
                                                      LiftoffRegister rhs) {
          Label* div_by_zero = AddOutOfLineTrap(
              decoder->position(), WasmCode::kThrowWasmTrapDivByZero);
          __ emit_i32_divu(dst.gp(), lhs.gp(), rhs.gp(), div_by_zero);
        });
        break;
      case WasmOpcode::kExprI32RemS:
        EmitBinOp<kWasmI32, kWasmI32>([this, decoder](LiftoffRegister dst,
                                                      LiftoffRegister lhs,
                                                      LiftoffRegister rhs) {
          Label* rem_by_zero = AddOutOfLineTrap(
              decoder->position(), WasmCode::kThrowWasmTrapRemByZero);
          __ emit_i32_rems(dst.gp(), lhs.gp(), rhs.gp(), rem_by_zero);
        });
        break;
      case WasmOpcode::kExprI32RemU:
        EmitBinOp<kWasmI32, kWasmI32>([this, decoder](LiftoffRegister dst,
                                                      LiftoffRegister lhs,
                                                      LiftoffRegister rhs) {
          Label* rem_by_zero = AddOutOfLineTrap(
              decoder->position(), WasmCode::kThrowWasmTrapRemByZero);
          __ emit_i32_remu(dst.gp(), lhs.gp(), rhs.gp(), rem_by_zero);
        });
        break;
      case WasmOpcode::kExprI64DivS:
        EmitBinOp<kWasmI64, kWasmI64>([this, decoder](LiftoffRegister dst,
                                                      LiftoffRegister lhs,
                                                      LiftoffRegister rhs) {
          WasmCodePosition position = decoder->position();
          AddOutOfLineTrap(position, WasmCode::kThrowWasmTrapDivByZero);
          // Adding the second trap might invalidate the pointer returned for
          // the first one, thus get both pointers afterwards.
          AddOutOfLineTrap(position,
                           WasmCode::kThrowWasmTrapDivUnrepresentable);
          Label* div_by_zero = out_of_line_code_.end()[-2].label.get();
          Label* div_unrepresentable = out_of_line_code_.end()[-1].label.get();
          if (!__ emit_i64_divs(dst, lhs, rhs, div_by_zero,
                                div_unrepresentable)) {
            ExternalReference ext_ref = ExternalReference::wasm_int64_div();
            EmitDivOrRem64CCall(dst, lhs, rhs, ext_ref, div_by_zero,
                                div_unrepresentable);
          }
        });
        break;
      case WasmOpcode::kExprI64DivU:
        EmitBinOp<kWasmI64, kWasmI64>([this, decoder](LiftoffRegister dst,
                                                      LiftoffRegister lhs,
                                                      LiftoffRegister rhs) {
          Label* div_by_zero = AddOutOfLineTrap(
              decoder->position(), WasmCode::kThrowWasmTrapDivByZero);
          if (!__ emit_i64_divu(dst, lhs, rhs, div_by_zero)) {
            ExternalReference ext_ref = ExternalReference::wasm_uint64_div();
            EmitDivOrRem64CCall(dst, lhs, rhs, ext_ref, div_by_zero);
          }
        });
        break;
      case WasmOpcode::kExprI64RemS:
        EmitBinOp<kWasmI64, kWasmI64>([this, decoder](LiftoffRegister dst,
                                                      LiftoffRegister lhs,
                                                      LiftoffRegister rhs) {
          Label* rem_by_zero = AddOutOfLineTrap(
              decoder->position(), WasmCode::kThrowWasmTrapRemByZero);
          if (!__ emit_i64_rems(dst, lhs, rhs, rem_by_zero)) {
            ExternalReference ext_ref = ExternalReference::wasm_int64_mod();
            EmitDivOrRem64CCall(dst, lhs, rhs, ext_ref, rem_by_zero);
          }
        });
        break;
      case WasmOpcode::kExprI64RemU:
        EmitBinOp<kWasmI64, kWasmI64>([this, decoder](LiftoffRegister dst,
                                                      LiftoffRegister lhs,
                                                      LiftoffRegister rhs) {
          Label* rem_by_zero = AddOutOfLineTrap(
              decoder->position(), WasmCode::kThrowWasmTrapRemByZero);
          if (!__ emit_i64_remu(dst, lhs, rhs, rem_by_zero)) {
            ExternalReference ext_ref = ExternalReference::wasm_uint64_mod();
            EmitDivOrRem64CCall(dst, lhs, rhs, ext_ref, rem_by_zero);
          }
        });
        break;
      default:
        return unsupported(decoder, WasmOpcodes::OpcodeName(opcode));
    }
#undef CASE_I32_BINOP
#undef CASE_I64_BINOP
#undef CASE_FLOAT_BINOP
#undef CASE_I32_CMPOP
#undef CASE_I64_CMPOP
#undef CASE_F32_CMPOP
#undef CASE_F64_CMPOP
#undef CASE_I32_SHIFTOP
#undef CASE_I64_SHIFTOP
#undef CASE_CCALL_BINOP
  }

  void I32Const(FullDecoder* decoder, Value* result, int32_t value) {
    __ cache_state()->stack_state.emplace_back(kWasmI32, value);
  }
  void I64Const(FullDecoder* decoder, Value* result, int64_t value) {
    // The {VarState} stores constant values as int32_t, thus we only store
    // 64-bit constants in this field if they fit in an int32_t. Larger values
    // cannot be used as immediate value anyway, so we can also just put them
    // in a register immediately.
    int32_t value_i32 = static_cast<int32_t>(value);
    if (value_i32 == value) {
      __ cache_state()->stack_state.emplace_back(kWasmI64, value_i32);
    } else {
      LiftoffRegister reg = __ GetUnusedRegister(reg_class_for(kWasmI64));
      __ LoadConstant(reg, WasmValue(value));
      __ PushRegister(kWasmI64, reg);
    }
  }
  void F32Const(FullDecoder* decoder, Value* result, float value) {
    LiftoffRegister reg = __ GetUnusedRegister(kFpReg);
    __ LoadConstant(reg, WasmValue(value));
    __ PushRegister(kWasmF32, reg);
  }
  void F64Const(FullDecoder* decoder, Value* result, double value) {
    LiftoffRegister reg = __ GetUnusedRegister(kFpReg);
    __ LoadConstant(reg, WasmValue(value));
    __ PushRegister(kWasmF64, reg);
  }
  void RefNull(FullDecoder* decoder, Value* result) {
    unsupported(decoder, "ref_null");
  }

  void Drop(FullDecoder* decoder, const Value& value) {
    auto& slot = __ cache_state()->stack_state.back();
    // If the dropped slot contains a register, decrement its use count.
    if (slot.is_reg()) __ cache_state()->dec_used(slot.reg());
    __ cache_state()->stack_state.pop_back();
  }
  void DoReturn(FullDecoder* decoder, Vector<Value> values, bool implicit) {
    if (implicit) {
      DCHECK_EQ(1, decoder->control_depth());
      Control* func_block = decoder->control_at(0);
      __ bind(func_block->label.get());
      __ cache_state()->Steal(func_block->label_state);
    }
    if (!values.is_empty()) {
      if (values.size() > 1) return unsupported(decoder, "multi-return");
      LiftoffRegister reg = __ PopToRegister();
      LiftoffRegister return_reg =
          kNeedI64RegPair && values[0].type == kWasmI64
              ? LiftoffRegister::ForPair(kGpReturnRegisters[0],
                                         kGpReturnRegisters[1])
              : reg_class_for(values[0].type) == kGpReg
                    ? LiftoffRegister(kGpReturnRegisters[0])
                    : LiftoffRegister(kFpReturnRegisters[0]);
      if (reg != return_reg) __ Move(return_reg, reg, values[0].type);
    }
    __ LeaveFrame(StackFrame::WASM_COMPILED);
    __ DropStackSlotsAndRet(
        static_cast<uint32_t>(descriptor_->StackParameterCount()));
  }
  void GetLocal(FullDecoder* decoder, Value* result,
                const LocalIndexImmediate<validate>& imm) {
    auto& slot = __ cache_state()->stack_state[imm.index];
    DCHECK_EQ(slot.type(), imm.type);
    switch (slot.loc()) {
      case kRegister:
        __ PushRegister(slot.type(), slot.reg());
        break;
      case KIntConst:
        __ cache_state()->stack_state.emplace_back(imm.type, slot.i32_const());
        break;
      case kStack: {
        auto rc = reg_class_for(imm.type);
        LiftoffRegister reg = __ GetUnusedRegister(rc);
        __ Fill(reg, imm.index, imm.type);
        __ PushRegister(slot.type(), reg);
        break;
      }
    }
  }
  void SetLocalFromStackSlot(LiftoffAssembler::VarState& dst_slot,
                             uint32_t local_index) {
    auto& state = *__ cache_state();
    ValueType type = dst_slot.type();
    if (dst_slot.is_reg()) {
      LiftoffRegister slot_reg = dst_slot.reg();
      if (state.get_use_count(slot_reg) == 1) {
        __ Fill(dst_slot.reg(), state.stack_height() - 1, type);
        return;
      }
      state.dec_used(slot_reg);
      dst_slot.MakeStack();
    }
    DCHECK_EQ(type, __ local_type(local_index));
    RegClass rc = reg_class_for(type);
    LiftoffRegister dst_reg = __ GetUnusedRegister(rc);
    __ Fill(dst_reg, __ cache_state()->stack_height() - 1, type);
    dst_slot = LiftoffAssembler::VarState(type, dst_reg);
    __ cache_state()->inc_used(dst_reg);
  }
  void SetLocal(uint32_t local_index, bool is_tee) {
    auto& state = *__ cache_state();
    auto& source_slot = state.stack_state.back();
    auto& target_slot = state.stack_state[local_index];
    switch (source_slot.loc()) {
      case kRegister:
        if (target_slot.is_reg()) state.dec_used(target_slot.reg());
        target_slot = source_slot;
        if (is_tee) state.inc_used(target_slot.reg());
        break;
      case KIntConst:
        if (target_slot.is_reg()) state.dec_used(target_slot.reg());
        target_slot = source_slot;
        break;
      case kStack:
        SetLocalFromStackSlot(target_slot, local_index);
        break;
    }
    if (!is_tee) __ cache_state()->stack_state.pop_back();
  }
  void SetLocal(FullDecoder* decoder, const Value& value,
                const LocalIndexImmediate<validate>& imm) {
    SetLocal(imm.index, false);
  }

  void TeeLocal(FullDecoder* decoder, const Value& value, Value* result,
                const LocalIndexImmediate<validate>& imm) {
    SetLocal(imm.index, true);
  }
  Register GetGlobalBaseAndOffset(const WasmGlobal* global,
                                  LiftoffRegList& pinned, uint32_t* offset) {
    Register addr = pinned.set(__ GetUnusedRegister(kGpReg)).gp();
    if (global->mutability && global->imported) {
      LOAD_INSTANCE_FIELD(addr, ImportedMutableGlobals, kPointerSize);
      __ Load(LiftoffRegister(addr), addr, no_reg,
              global->index * sizeof(Address), kPointerLoadType, pinned);
      *offset = 0;
    } else {
      LOAD_INSTANCE_FIELD(addr, GlobalsStart, kPointerSize);
      *offset = global->offset;
    }
    return addr;
  }
  void GetGlobal(FullDecoder* decoder, Value* result,
                 const GlobalIndexImmediate<validate>& imm) {
    const auto* global = &env_->module->globals[imm.index];
    if (!CheckSupportedType(decoder, kSupportedTypes, global->type, "global"))
      return;
    LiftoffRegList pinned;
    uint32_t offset = 0;
    Register addr = GetGlobalBaseAndOffset(global, pinned, &offset);
    LiftoffRegister value =
        pinned.set(__ GetUnusedRegister(reg_class_for(global->type), pinned));
    LoadType type = LoadType::ForValueType(global->type);
    __ Load(value, addr, no_reg, offset, type, pinned, nullptr, true);
    __ PushRegister(global->type, value);
  }
  void SetGlobal(FullDecoder* decoder, const Value& value,
                 const GlobalIndexImmediate<validate>& imm) {
    auto* global = &env_->module->globals[imm.index];
    if (!CheckSupportedType(decoder, kSupportedTypes, global->type, "global"))
      return;
    LiftoffRegList pinned;
    uint32_t offset = 0;
    Register addr = GetGlobalBaseAndOffset(global, pinned, &offset);
    LiftoffRegister reg = pinned.set(__ PopToRegister(pinned));
    StoreType type = StoreType::ForValueType(global->type);
    __ Store(addr, no_reg, offset, reg, type, {}, nullptr, true);
  }
  void Unreachable(FullDecoder* decoder) {
    Label* unreachable_label = AddOutOfLineTrap(
        decoder->position(), WasmCode::kThrowWasmTrapUnreachable);
    __ emit_jump(unreachable_label);
    __ AssertUnreachable(AbortReason::kUnexpectedReturnFromWasmTrap);
  }
  void Select(FullDecoder* decoder, const Value& cond, const Value& fval,
              const Value& tval, Value* result) {
    LiftoffRegList pinned;
    Register condition = pinned.set(__ PopToRegister()).gp();
    ValueType type = __ cache_state()->stack_state.end()[-1].type();
    DCHECK_EQ(type, __ cache_state()->stack_state.end()[-2].type());
    LiftoffRegister false_value = pinned.set(__ PopToRegister(pinned));
    LiftoffRegister true_value = __ PopToRegister(pinned);
    LiftoffRegister dst =
        __ GetUnusedRegister(true_value.reg_class(), {true_value, false_value});
    __ PushRegister(type, dst);

    // Now emit the actual code to move either {true_value} or {false_value}
    // into {dst}.
    Label cont;
    Label case_false;
    __ emit_cond_jump(kEqual, &case_false, kWasmI32, condition);
    if (dst != true_value) __ Move(dst, true_value, type);
    __ emit_jump(&cont);

    __ bind(&case_false);
    if (dst != false_value) __ Move(dst, false_value, type);
    __ bind(&cont);
  }
  void Br(Control* target) {
    if (!target->br_merge()->reached) {
      target->label_state.InitMerge(*__ cache_state(), __ num_locals(),
                                    target->br_merge()->arity);
    }
    __ MergeStackWith(target->label_state, target->br_merge()->arity);
    __ jmp(target->label.get());
  }

  void Br(FullDecoder* decoder, Control* target) { Br(target); }
  void BrIf(FullDecoder* decoder, const Value& cond, Control* target) {
    Label cont_false;
    Register value = __ PopToRegister().gp();
    __ emit_cond_jump(kEqual, &cont_false, kWasmI32, value);

    Br(target);
    __ bind(&cont_false);
  }
  // Generate a branch table case, potentially reusing previously generated
  // stack transfer code.
  void GenerateBrCase(FullDecoder* decoder, uint32_t br_depth,
                      std::map<uint32_t, MovableLabel>& br_targets) {
    MovableLabel& label = br_targets[br_depth];
    if (label.get()->is_bound()) {
      __ jmp(label.get());
    } else {
      __ bind(label.get());
      Br(decoder->control_at(br_depth));
    }
  }
  // Generate a branch table for the input in [min, max).
  void GenerateBrTable(FullDecoder* decoder, LiftoffRegister tmp,
                       LiftoffRegister value, uint32_t min, uint32_t max,
                       BranchTableIterator<validate>& table_iterator,
                       std::map<uint32_t, MovableLabel>& br_targets) {
    DCHECK_LT(min, max);
    // Check the base case: a single entry left.
    if (max == min + 1) {
      DCHECK_EQ(min, table_iterator.cur_index());
      GenerateBrCase(decoder, table_iterator.next(), br_targets);
      return;
    }

    uint32_t split = min + (max - min) / 2;
    Label upper_half;
    __ LoadConstant(tmp, WasmValue(split));
    __ emit_cond_jump(kUnsignedGreaterEqual, &upper_half, kWasmI32, value.gp(),
                      tmp.gp());
    // Emit the br table for the lower half:
    GenerateBrTable(decoder, tmp, value, min, split, table_iterator,
                    br_targets);
    __ bind(&upper_half);
    // Emit the br table for the upper half:
    GenerateBrTable(decoder, tmp, value, split, max, table_iterator,
                    br_targets);
  }
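  // Example: a br_table with four targets is emitted as a binary search
  // rather than a jump table. In sketch form:
  //   if (value >= 2) goto upper_half
  //   if (value >= 1) goto case_1 else goto case_0
  // upper_half:
  //   if (value >= 3) goto case_3 else goto case_2
  // {GenerateBrCase} additionally reuses the merge-and-jump code whenever
  // several table entries branch to the same depth.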
  void BrTable(FullDecoder* decoder, const BranchTableImmediate<validate>& imm,
               const Value& key) {
    LiftoffRegList pinned;
    LiftoffRegister value = pinned.set(__ PopToRegister());
    BranchTableIterator<validate> table_iterator(decoder, imm);
    std::map<uint32_t, MovableLabel> br_targets;

    if (imm.table_count > 0) {
      LiftoffRegister tmp = __ GetUnusedRegister(kGpReg, pinned);
      __ LoadConstant(tmp, WasmValue(uint32_t{imm.table_count}));
      Label case_default;
      __ emit_cond_jump(kUnsignedGreaterEqual, &case_default, kWasmI32,
                        value.gp(), tmp.gp());

      GenerateBrTable(decoder, tmp, value, 0, imm.table_count, table_iterator,
                      br_targets);

      __ bind(&case_default);
    }

    // Generate the default case.
    GenerateBrCase(decoder, table_iterator.next(), br_targets);
    DCHECK(!table_iterator.has_next());
  }
  void Else(FullDecoder* decoder, Control* if_block) {
    if (if_block->reachable()) __ emit_jump(if_block->label.get());
    __ bind(if_block->else_state->label.get());
    __ cache_state()->Steal(if_block->else_state->state);
  }
  Label* AddOutOfLineTrap(WasmCodePosition position,
                          WasmCode::RuntimeStubId stub, uint32_t pc = 0) {
    DCHECK(!FLAG_wasm_no_bounds_checks);
    // The pc is needed for memory OOB trap with trap handler enabled. Other
    // callers should not even compute it.
    DCHECK_EQ(pc != 0, stub == WasmCode::kThrowWasmTrapMemOutOfBounds &&
                           env_->use_trap_handler);

    out_of_line_code_.push_back(OutOfLineCode::Trap(stub, position, pc));
    return out_of_line_code_.back().label.get();
  }
  // Returns true if the memory access is statically known to be out of bounds
  // (a jump to the trap was generated then); returns false otherwise.
  bool BoundsCheckMem(FullDecoder* decoder, uint32_t access_size,
                      uint32_t offset, Register index, LiftoffRegList pinned) {
    const bool statically_oob = access_size > env_->max_memory_size ||
                                offset > env_->max_memory_size - access_size;

    if (!statically_oob &&
        (FLAG_wasm_no_bounds_checks || env_->use_trap_handler)) {
      return false;
    }

    Label* trap_label = AddOutOfLineTrap(
        decoder->position(), WasmCode::kThrowWasmTrapMemOutOfBounds,
        env_->use_trap_handler ? __ pc_offset() : 0);

    if (statically_oob) {
      __ emit_jump(trap_label);
      Control* current_block = decoder->control_at(0);
      if (current_block->reachable()) {
        current_block->reachability = kSpecOnlyReachable;
      }
      return true;
    }

    DCHECK(!env_->use_trap_handler);
    DCHECK(!FLAG_wasm_no_bounds_checks);

    uint64_t end_offset = uint64_t{offset} + access_size - 1u;

    // If the end offset is larger than the smallest memory, dynamically check
    // the end offset against the actual memory size, which is not known at
    // compile time.
    LiftoffRegister end_offset_reg =
        pinned.set(__ GetUnusedRegister(kGpReg, pinned));
    Register mem_size = __ GetUnusedRegister(kGpReg, pinned).gp();
    LOAD_INSTANCE_FIELD(mem_size, MemorySize, kPointerSize);

    if (kPointerSize == 8) {
      __ LoadConstant(end_offset_reg, WasmValue(end_offset));
    } else {
      __ LoadConstant(end_offset_reg,
                      WasmValue(static_cast<uint32_t>(end_offset)));
    }

    if (end_offset >= env_->min_memory_size) {
      __ emit_cond_jump(kUnsignedGreaterEqual, trap_label,
                        LiftoffAssembler::kWasmIntPtr, end_offset_reg.gp(),
                        mem_size);
    }

    // Just reuse the end_offset register for computing the effective size.
    LiftoffRegister effective_size_reg = end_offset_reg;
    __ emit_ptrsize_sub(effective_size_reg.gp(), mem_size, end_offset_reg.gp());

    __ emit_i32_to_intptr(index, index);

    __ emit_cond_jump(kUnsignedGreaterEqual, trap_label,
                      LiftoffAssembler::kWasmIntPtr, index,
                      effective_size_reg.gp());
    return false;
  }
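  // In sketch form, the explicit bounds check emitted above is:
  //   end_offset = offset + access_size - 1   // compile-time constant
  //   if (end_offset >= mem_size) goto ool_trap          // only emitted if
  //                                                      // {end_offset} can
  //                                                      // exceed min size
  //   if (index >= mem_size - end_offset) goto ool_trap  // "effective size"
  // Both compares are unsigned, so one subtraction plus one compare covers
  // index, static offset, and access size at once.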
  void TraceMemoryOperation(bool is_store, MachineRepresentation rep,
                            Register index, uint32_t offset,
                            WasmCodePosition position) {
    // Before making the runtime call, spill all cache registers.
    __ SpillAllRegisters();

    LiftoffRegList pinned = LiftoffRegList::ForRegs(index);
    // Get one register for computing the address (offset + index).
    LiftoffRegister address = pinned.set(__ GetUnusedRegister(kGpReg, pinned));
    // Compute offset+index in {address}.
    __ LoadConstant(address, WasmValue(offset));
    __ emit_i32_add(address.gp(), address.gp(), index);

    // Get a register to hold the stack slot for MemoryTracingInfo.
    LiftoffRegister info = pinned.set(__ GetUnusedRegister(kGpReg, pinned));
    // Allocate the stack slot for the MemoryTracingInfo.
    __ AllocateStackSlot(info.gp(), sizeof(MemoryTracingInfo));

    // Now store all information into the MemoryTracingInfo struct.
    __ Store(info.gp(), no_reg, offsetof(MemoryTracingInfo, address), address,
             StoreType::kI32Store, pinned);
    __ LoadConstant(address, WasmValue(is_store ? 1 : 0));
    __ Store(info.gp(), no_reg, offsetof(MemoryTracingInfo, is_store), address,
             StoreType::kI32Store8, pinned);
    __ LoadConstant(address, WasmValue(static_cast<int>(rep)));
    __ Store(info.gp(), no_reg, offsetof(MemoryTracingInfo, mem_rep), address,
             StoreType::kI32Store8, pinned);

    source_position_table_builder_.AddPosition(__ pc_offset(),
                                               SourcePosition(position),
                                               false);

    Register args[] = {info.gp()};
    GenerateRuntimeCall(Runtime::kWasmTraceMemory, arraysize(args), args);
    __ DeallocateStackSlot(sizeof(MemoryTracingInfo));
  }
  void GenerateRuntimeCall(Runtime::FunctionId runtime_function, int num_args,
                           Register* args) {
    auto call_descriptor = compiler::Linkage::GetRuntimeCallDescriptor(
        compilation_zone_, runtime_function, num_args,
        compiler::Operator::kNoProperties, compiler::CallDescriptor::kNoFlags);
    // Currently, only one argument is supported. More arguments require some
    // caution for the parallel register moves (reuse StackTransferRecipe).
    DCHECK_EQ(1, num_args);
    constexpr size_t kInputShift = 1;  // Input 0 is the call target.
    compiler::LinkageLocation param_loc =
        call_descriptor->GetInputLocation(kInputShift);
    if (param_loc.IsRegister()) {
      Register reg = Register::from_code(param_loc.AsRegister());
      __ Move(LiftoffRegister(reg), LiftoffRegister(args[0]),
              LiftoffAssembler::kWasmIntPtr);
    } else {
      DCHECK(param_loc.IsCallerFrameSlot());
      LiftoffStackSlots stack_slots(&asm_);
      stack_slots.Add(LiftoffAssembler::VarState(LiftoffAssembler::kWasmIntPtr,
                                                 LiftoffRegister(args[0])));
      stack_slots.Construct();
    }

    // Set the context to "no context" for the runtime call.
    __ TurboAssembler::Move(kContextRegister,
                            Smi::FromInt(Context::kNoContext));
    Register centry = kJavaScriptCallCodeStartRegister;
    LOAD_INSTANCE_FIELD(centry, CEntryStub, kPointerSize);
    __ CallRuntimeWithCEntry(runtime_function, centry);
    safepoint_table_builder_.DefineSafepoint(&asm_, Safepoint::kSimple, 0,
                                             Safepoint::kNoLazyDeopt);
  }
  Register AddMemoryMasking(Register index, uint32_t* offset,
                            LiftoffRegList& pinned) {
    if (!FLAG_untrusted_code_mitigations || env_->use_trap_handler) {
      return index;
    }
    DEBUG_CODE_COMMENT("Mask memory index");
    // Make sure that we can overwrite {index}.
    if (__ cache_state()->is_used(LiftoffRegister(index))) {
      Register old_index = index;
      pinned.clear(LiftoffRegister(old_index));
      index = pinned.set(__ GetUnusedRegister(kGpReg, pinned)).gp();
      if (index != old_index) __ Move(index, old_index, kWasmI32);
    }
    Register tmp = __ GetUnusedRegister(kGpReg, pinned).gp();
    __ LoadConstant(LiftoffRegister(tmp), WasmValue(*offset));
    __ emit_ptrsize_add(index, index, tmp);
    LOAD_INSTANCE_FIELD(tmp, MemoryMask, kPointerSize);
    __ emit_ptrsize_and(index, index, tmp);
    *offset = 0;
    return index;
  }
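  // Sketch of the mitigation above (active only with
  // --untrusted-code-mitigations and no trap handler):
  //   index = (index + offset) & instance.MemoryMask
  // where {MemoryMask} is a mask derived from the memory size. This clamps
  // speculatively out-of-bounds accesses into the accessible region; the
  // masked index already includes the static offset, hence {*offset} is reset
  // to 0 for the caller.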
  void LoadMem(FullDecoder* decoder, LoadType type,
               const MemoryAccessImmediate<validate>& imm,
               const Value& index_val, Value* result) {
    ValueType value_type = type.value_type();
    if (!CheckSupportedType(decoder, kSupportedTypes, value_type, "load"))
      return;
    LiftoffRegList pinned;
    Register index = pinned.set(__ PopToRegister()).gp();
    if (BoundsCheckMem(decoder, type.size(), imm.offset, index, pinned)) {
      return;
    }
    uint32_t offset = imm.offset;
    index = AddMemoryMasking(index, &offset, pinned);
    DEBUG_CODE_COMMENT("Load from memory");
    Register addr = pinned.set(__ GetUnusedRegister(kGpReg, pinned)).gp();
    LOAD_INSTANCE_FIELD(addr, MemoryStart, kPointerSize);
    RegClass rc = reg_class_for(value_type);
    LiftoffRegister value = pinned.set(__ GetUnusedRegister(rc, pinned));
    uint32_t protected_load_pc = 0;
    __ Load(value, addr, index, offset, type, pinned, &protected_load_pc, true);
    if (env_->use_trap_handler) {
      AddOutOfLineTrap(decoder->position(),
                       WasmCode::kThrowWasmTrapMemOutOfBounds,
                       protected_load_pc);
    }
    __ PushRegister(value_type, value);

    if (FLAG_trace_wasm_memory) {
      TraceMemoryOperation(false, type.mem_type().representation(), index,
                           offset, decoder->position());
    }
  }
  void StoreMem(FullDecoder* decoder, StoreType type,
                const MemoryAccessImmediate<validate>& imm,
                const Value& index_val, const Value& value_val) {
    ValueType value_type = type.value_type();
    if (!CheckSupportedType(decoder, kSupportedTypes, value_type, "store"))
      return;
    LiftoffRegList pinned;
    LiftoffRegister value = pinned.set(__ PopToRegister());
    Register index = pinned.set(__ PopToRegister(pinned)).gp();
    if (BoundsCheckMem(decoder, type.size(), imm.offset, index, pinned)) {
      return;
    }
    uint32_t offset = imm.offset;
    index = AddMemoryMasking(index, &offset, pinned);
    DEBUG_CODE_COMMENT("Store to memory");
    Register addr = pinned.set(__ GetUnusedRegister(kGpReg, pinned)).gp();
    LOAD_INSTANCE_FIELD(addr, MemoryStart, kPointerSize);
    uint32_t protected_store_pc = 0;
    LiftoffRegList outer_pinned;
    if (FLAG_trace_wasm_memory) outer_pinned.set(index);
    __ Store(addr, index, offset, value, type, outer_pinned,
             &protected_store_pc, true);
    if (env_->use_trap_handler) {
      AddOutOfLineTrap(decoder->position(),
                       WasmCode::kThrowWasmTrapMemOutOfBounds,
                       protected_store_pc);
    }
    if (FLAG_trace_wasm_memory) {
      TraceMemoryOperation(true, type.mem_rep(), index, offset,
                           decoder->position());
    }
  }
  void CurrentMemoryPages(FullDecoder* decoder, Value* result) {
    Register mem_size = __ GetUnusedRegister(kGpReg).gp();
    LOAD_INSTANCE_FIELD(mem_size, MemorySize, kPointerSize);
    __ emit_ptrsize_shr(mem_size, mem_size, kWasmPageSizeLog2);
    __ PushRegister(kWasmI32, LiftoffRegister(mem_size));
  }
  void MemoryGrow(FullDecoder* decoder, const Value& value,
                  Value* result_val) {
    // Pop the input, then spill all cache registers to make the runtime call.
    LiftoffRegList pinned;
    LiftoffRegister input = pinned.set(__ PopToRegister());
    __ SpillAllRegisters();

    constexpr Register kGpReturnReg = kGpReturnRegisters[0];
    static_assert(kLiftoffAssemblerGpCacheRegs & Register::bit<kGpReturnReg>(),
                  "first return register is a cache register (needs more "
                  "complex code here otherwise)");
    LiftoffRegister result = pinned.set(LiftoffRegister(kGpReturnReg));

    WasmMemoryGrowDescriptor descriptor;
    DCHECK_EQ(0, descriptor.GetStackParameterCount());
    DCHECK_EQ(1, descriptor.GetRegisterParameterCount());
    DCHECK_EQ(ValueTypes::MachineTypeFor(kWasmI32),
              descriptor.GetParameterType(0));

    Register param_reg = descriptor.GetRegisterParameter(0);
    if (input.gp() != param_reg) __ Move(param_reg, input.gp(), kWasmI32);

    __ CallRuntimeStub(WasmCode::kWasmMemoryGrow);
    safepoint_table_builder_.DefineSafepoint(&asm_, Safepoint::kSimple, 0,
                                             Safepoint::kNoLazyDeopt);

    if (kReturnRegister0 != result.gp()) {
      __ Move(result.gp(), kReturnRegister0, kWasmI32);
    }

    __ PushRegister(kWasmI32, result);
  }
  void CallDirect(FullDecoder* decoder,
                  const CallFunctionImmediate<validate>& imm,
                  const Value args[], Value returns[]) {
    if (imm.sig->return_count() > 1)
      return unsupported(decoder, "multi-return");
    if (imm.sig->return_count() == 1 &&
        !CheckSupportedType(decoder, kSupportedTypes, imm.sig->GetReturn(0),
                            "return"))
      return;
    if (DidAssemblerBailout(decoder)) return;

    auto call_descriptor =
        compiler::GetWasmCallDescriptor(compilation_zone_, imm.sig);
    call_descriptor =
        GetLoweredCallDescriptor(compilation_zone_, call_descriptor);

    if (imm.index < env_->module->num_imported_functions) {
      // A direct call to an imported function.
      LiftoffRegList pinned;
      Register tmp = pinned.set(__ GetUnusedRegister(kGpReg, pinned)).gp();
      Register target = pinned.set(__ GetUnusedRegister(kGpReg, pinned)).gp();

      Register imported_targets = tmp;
      LOAD_INSTANCE_FIELD(imported_targets, ImportedFunctionTargets,
                          kPointerSize);
      __ Load(LiftoffRegister(target), imported_targets, no_reg,
              imm.index * sizeof(Address), kPointerLoadType, pinned);

      Register imported_function_refs = tmp;
      LOAD_INSTANCE_FIELD(imported_function_refs, ImportedFunctionRefs,
                          kPointerSize);
      Register imported_function_ref = tmp;
      __ Load(LiftoffRegister(imported_function_ref), imported_function_refs,
              no_reg, ObjectAccess::ElementOffsetInTaggedFixedArray(imm.index),
              kPointerLoadType, pinned);

      Register* explicit_instance = &imported_function_ref;
      __ PrepareCall(imm.sig, call_descriptor, &target, explicit_instance);
      source_position_table_builder_.AddPosition(
          __ pc_offset(), SourcePosition(decoder->position()), false);

      __ CallIndirect(imm.sig, call_descriptor, target);

      safepoint_table_builder_.DefineSafepoint(&asm_, Safepoint::kSimple, 0,
                                               Safepoint::kNoLazyDeopt);

      __ FinishCall(imm.sig, call_descriptor);
    } else {
      // A call within this module.
      __ PrepareCall(imm.sig, call_descriptor);

      source_position_table_builder_.AddPosition(
          __ pc_offset(), SourcePosition(decoder->position()), false);

      // Just encode the function index. This will be patched at instantiation.
      Address addr = static_cast<Address>(imm.index);
      __ CallNativeWasmCode(addr);

      safepoint_table_builder_.DefineSafepoint(&asm_, Safepoint::kSimple, 0,
                                               Safepoint::kNoLazyDeopt);

      __ FinishCall(imm.sig, call_descriptor);
    }
  }
  void CallIndirect(FullDecoder* decoder, const Value& index_val,
                    const CallIndirectImmediate<validate>& imm,
                    const Value args[], Value returns[]) {
    if (imm.sig->return_count() > 1) {
      return unsupported(decoder, "multi-return");
    }
    if (imm.sig->return_count() == 1 &&
        !CheckSupportedType(decoder, kSupportedTypes, imm.sig->GetReturn(0),
                            "return")) {
      return;
    }

    // Pop the index.
    Register index = __ PopToRegister().gp();
    // If that register is still being used after popping, we move it to
    // another register, because we want to modify that register.
    if (__ cache_state()->is_used(LiftoffRegister(index))) {
      Register new_index =
          __ GetUnusedRegister(kGpReg, LiftoffRegList::ForRegs(index)).gp();
      __ Move(new_index, index, kWasmI32);
      index = new_index;
    }

    LiftoffRegList pinned = LiftoffRegList::ForRegs(index);
    // Get three temporary registers.
    Register table = pinned.set(__ GetUnusedRegister(kGpReg, pinned)).gp();
    Register tmp_const = pinned.set(__ GetUnusedRegister(kGpReg, pinned)).gp();
    Register scratch = pinned.set(__ GetUnusedRegister(kGpReg, pinned)).gp();

    // Bounds check against the table size.
    Label* invalid_func_label = AddOutOfLineTrap(
        decoder->position(), WasmCode::kThrowWasmTrapFuncInvalid);

    uint32_t canonical_sig_num = env_->module->signature_ids[imm.sig_index];
    DCHECK_GE(canonical_sig_num, 0);
    DCHECK_GE(kMaxInt, canonical_sig_num);

    // Compare the index against the table size.
    LOAD_INSTANCE_FIELD(tmp_const, IndirectFunctionTableSize, kUInt32Size);
    __ emit_cond_jump(kUnsignedGreaterEqual, invalid_func_label, kWasmI32,
                      index, tmp_const);

    // Mask the index to prevent SSCA.
    if (FLAG_untrusted_code_mitigations) {
      DEBUG_CODE_COMMENT("Mask indirect call index");
      // mask = ((index - size) & ~index) >> 31
      // Reuse the temporary registers; the table size is still in {tmp_const}.
      Register diff = table;
      Register neg_index = tmp_const;
      Register mask = scratch;
      // 1) diff = index - size
      __ emit_i32_sub(diff, index, tmp_const);
      // 2) neg_index = ~index
      __ LoadConstant(LiftoffRegister(neg_index), WasmValue(int32_t{-1}));
      __ emit_i32_xor(neg_index, neg_index, index);
      // 3) mask = diff & neg_index
      __ emit_i32_and(mask, diff, neg_index);
      // 4) mask = mask >> 31
      __ LoadConstant(LiftoffRegister(tmp_const), WasmValue(int32_t{31}));
      __ emit_i32_sar(mask, mask, tmp_const, pinned);

      // Apply the mask.
      __ emit_i32_and(index, index, mask);
    }

    DEBUG_CODE_COMMENT("Check indirect call signature");
    // Load the signature id from the indirect function table.
    LOAD_INSTANCE_FIELD(table, IndirectFunctionTableSigIds, kPointerSize);
    __ LoadConstant(LiftoffRegister(tmp_const),
                    WasmValue(static_cast<uint32_t>(sizeof(uint32_t))));
    __ emit_i32_mul(index, index, tmp_const);
    __ Load(LiftoffRegister(scratch), table, index, 0, LoadType::kI32Load,
            pinned);

    // Compare against the expected signature.
    __ LoadConstant(LiftoffRegister(tmp_const), WasmValue(canonical_sig_num));

    Label* sig_mismatch_label = AddOutOfLineTrap(
        decoder->position(), WasmCode::kThrowWasmTrapFuncSigMismatch);
    __ emit_cond_jump(kUnequal, sig_mismatch_label,
                      LiftoffAssembler::kWasmIntPtr, scratch, tmp_const);

    DEBUG_CODE_COMMENT("Execute indirect call");
    if (kPointerSize == 8) {
      // {index} has already been multiplied by 4. Multiply by another 2.
      __ LoadConstant(LiftoffRegister(tmp_const), WasmValue(2));
      __ emit_i32_mul(index, index, tmp_const);
    }

    // Load the target address from the indirect function table.
    LOAD_INSTANCE_FIELD(table, IndirectFunctionTableTargets, kPointerSize);
    __ Load(LiftoffRegister(scratch), table, index, 0, kPointerLoadType,
            pinned);

    // Load the instance from the indirect function table.
    LOAD_INSTANCE_FIELD(table, IndirectFunctionTableRefs, kPointerSize);
    __ Load(LiftoffRegister(tmp_const), table, index,
            ObjectAccess::ElementOffsetInTaggedFixedArray(0), kPointerLoadType,
            pinned);
    Register* explicit_instance = &tmp_const;

    source_position_table_builder_.AddPosition(
        __ pc_offset(), SourcePosition(decoder->position()), false);

    auto call_descriptor =
        compiler::GetWasmCallDescriptor(compilation_zone_, imm.sig);
    call_descriptor =
        GetLoweredCallDescriptor(compilation_zone_, call_descriptor);

    Register target = scratch;
    __ PrepareCall(imm.sig, call_descriptor, &target, explicit_instance);
    __ CallIndirect(imm.sig, call_descriptor, target);

    safepoint_table_builder_.DefineSafepoint(&asm_, Safepoint::kSimple, 0,
                                             Safepoint::kNoLazyDeopt);

    __ FinishCall(imm.sig, call_descriptor);
  }
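  // In sketch form, the indirect-call sequence emitted above performs:
  //   if (index >= instance.indirect_function_table_size) goto trap_invalid
  //   if (instance.ift_sig_ids[index] != canonical_sig_num)
  //     goto trap_sig_mismatch
  //   target       = instance.ift_targets[index]
  //   new_instance = instance.ift_instances[index]
  //   call target                // {new_instance} as explicit instance arg
  // plus, optionally, the speculative-execution index mask shown above.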
  void SimdOp(FullDecoder* decoder, WasmOpcode opcode, Vector<Value> args,
              Value* result) {
    unsupported(decoder, "simd");
  }

  void SimdLaneOp(FullDecoder* decoder, WasmOpcode opcode,
                  const SimdLaneImmediate<validate>& imm,
                  const Vector<Value> inputs, Value* result) {
    unsupported(decoder, "simd");
  }

  void SimdShiftOp(FullDecoder* decoder, WasmOpcode opcode,
                   const SimdShiftImmediate<validate>& imm, const Value& input,
                   Value* result) {
    unsupported(decoder, "simd");
  }

  void Simd8x16ShuffleOp(FullDecoder* decoder,
                         const Simd8x16ShuffleImmediate<validate>& imm,
                         const Value& input0, const Value& input1,
                         Value* result) {
    unsupported(decoder, "simd");
  }

  void Throw(FullDecoder* decoder, const ExceptionIndexImmediate<validate>&,
             const Vector<Value>& args) {
    unsupported(decoder, "throw");
  }

  void Rethrow(FullDecoder* decoder, Control* block) {
    unsupported(decoder, "rethrow");
  }

  void CatchException(FullDecoder* decoder,
                      const ExceptionIndexImmediate<validate>& imm,
                      Control* block, Vector<Value> caught_values) {
    unsupported(decoder, "catch");
  }

  void CatchAll(FullDecoder* decoder, Control* block) {
    unsupported(decoder, "catch-all");
  }

  void AtomicOp(FullDecoder* decoder, WasmOpcode opcode, Vector<Value> args,
                const MemoryAccessImmediate<validate>& imm, Value* result) {
    unsupported(decoder, "atomicop");
  }

  void MemoryInit(FullDecoder* decoder,
                  const MemoryInitImmediate<validate>& imm,
                  Vector<Value> args) {
    unsupported(decoder, "memory.init");
  }

  void MemoryDrop(FullDecoder* decoder,
                  const MemoryDropImmediate<validate>& imm) {
    unsupported(decoder, "memory.drop");
  }

  void MemoryCopy(FullDecoder* decoder,
                  const MemoryIndexImmediate<validate>& imm,
                  Vector<Value> args) {
    unsupported(decoder, "memory.copy");
  }

  void MemoryFill(FullDecoder* decoder,
                  const MemoryIndexImmediate<validate>& imm,
                  Vector<Value> args) {
    unsupported(decoder, "memory.fill");
  }

  void TableInit(FullDecoder* decoder, const TableInitImmediate<validate>& imm,
                 Vector<Value> args) {
    unsupported(decoder, "table.init");
  }

  void TableDrop(FullDecoder* decoder,
                 const TableDropImmediate<validate>& imm) {
    unsupported(decoder, "table.drop");
  }

  void TableCopy(FullDecoder* decoder,
                 const TableIndexImmediate<validate>& imm,
                 Vector<Value> args) {
    unsupported(decoder, "table.copy");
  }
 private:
  LiftoffAssembler asm_;
  compiler::CallDescriptor* const descriptor_;
  CompilationEnv* const env_;
  bool ok_ = true;
  std::vector<OutOfLineCode> out_of_line_code_;
  SourcePositionTableBuilder source_position_table_builder_;
  std::vector<trap_handler::ProtectedInstructionData> protected_instructions_;
  // Zone used to store information during compilation. The result will be
  // stored independently, such that this zone can die together with the
  // LiftoffCompiler after compilation.
  Zone* compilation_zone_;
  SafepointTableBuilder safepoint_table_builder_;
  // The pc offset of the instructions to reserve the stack frame. Needed to
  // patch the actually needed stack size in the end.
  uint32_t pc_offset_stack_frame_construction_ = 0;
  void TraceCacheState(FullDecoder* decoder) const {
#ifdef DEBUG
    if (!FLAG_trace_liftoff || !FLAG_trace_wasm_decoder) return;
    StdoutStream os;
    for (int control_depth = decoder->control_depth() - 1; control_depth >= -1;
         --control_depth) {
      auto* cache_state =
          control_depth == -1 ? __ cache_state()
                              : &decoder->control_at(control_depth)
                                     ->label_state;
      os << PrintCollection(cache_state->stack_state);
      if (control_depth != -1) PrintF("; ");
    }
    os << "\n";
#endif
  }
  DISALLOW_IMPLICIT_CONSTRUCTORS(LiftoffCompiler);
};

}  // namespace
bool LiftoffCompilationUnit::ExecuteCompilation(CompilationEnv* env,
                                                const FunctionBody& func_body,
                                                Counters* counters,
                                                WasmFeatures* detected) {
  TRACE_EVENT0(TRACE_DISABLED_BY_DEFAULT("v8.wasm"),
               "ExecuteLiftoffCompilation");
  base::ElapsedTimer compile_timer;
  if (FLAG_trace_wasm_decode_time) {
    compile_timer.Start();
  }

  Zone zone(wasm_unit_->wasm_engine_->allocator(), "LiftoffCompilationZone");
  const WasmModule* module = env ? env->module : nullptr;
  auto call_descriptor = compiler::GetWasmCallDescriptor(&zone, func_body.sig);
  base::Optional<TimedHistogramScope> liftoff_compile_time_scope(
      base::in_place, counters->liftoff_compile_time());
  WasmFullDecoder<Decoder::kValidate, LiftoffCompiler> decoder(
      &zone, module, wasm_unit_->native_module_->enabled_features(), detected,
      func_body, call_descriptor, env, &zone);
  decoder.Decode();
  liftoff_compile_time_scope.reset();
  LiftoffCompiler* compiler = &decoder.interface();
  if (decoder.failed()) return false;  // validation error
  if (!compiler->ok()) {
    // Liftoff compilation failed.
    counters->liftoff_unsupported_functions()->Increment();
    return false;
  }

  counters->liftoff_compiled_functions()->Increment();

  if (FLAG_trace_wasm_decode_time) {
    double compile_ms = compile_timer.Elapsed().InMillisecondsF();
    PrintF(
        "wasm-compilation liftoff phase 1 ok: %u bytes, %0.3f ms decode and "
        "compile\n",
        static_cast<unsigned>(func_body.end - func_body.start), compile_ms);
  }

  CodeDesc desc;
  compiler->GetCode(&desc);
  OwnedVector<byte> source_positions = compiler->GetSourcePositionTable();
  OwnedVector<trap_handler::ProtectedInstructionData> protected_instructions =
      compiler->GetProtectedInstructions();
  uint32_t frame_slot_count = compiler->GetTotalFrameSlotCount();
  int safepoint_table_offset = compiler->GetSafepointTableOffset();

  WasmCode* code = wasm_unit_->native_module_->AddCode(
      wasm_unit_->func_index_, desc, frame_slot_count, safepoint_table_offset,
      0, std::move(protected_instructions), std::move(source_positions),
      WasmCode::kFunction, WasmCode::kLiftoff);
  wasm_unit_->SetResult(code, counters);

  return true;
}
#undef __
#undef TRACE
#undef WASM_INSTANCE_OBJECT_OFFSET
#undef WASM_INSTANCE_OBJECT_SIZE
#undef LOAD_INSTANCE_FIELD
#undef DEBUG_CODE_COMMENT

}  // namespace wasm
}  // namespace internal
}  // namespace v8