5 #include "src/interpreter/interpreter-assembler.h" 10 #include "src/code-factory.h" 11 #include "src/frames.h" 12 #include "src/interface-descriptors.h" 13 #include "src/interpreter/bytecodes.h" 14 #include "src/interpreter/interpreter.h" 15 #include "src/machine-type.h" 16 #include "src/macro-assembler.h" 17 #include "src/objects-inl.h" 18 #include "src/zone/zone.h" 22 namespace interpreter {
24 using compiler::CodeAssemblerState;
27 using TNode = compiler::TNode<T>;
InterpreterAssembler::InterpreterAssembler(CodeAssemblerState* state,
                                           Bytecode bytecode,
                                           OperandScale operand_scale)
    : CodeStubAssembler(state),
      bytecode_(bytecode),
      operand_scale_(operand_scale),
      VARIABLE_CONSTRUCTOR(interpreted_frame_pointer_,
                           MachineType::PointerRepresentation()),
      VARIABLE_CONSTRUCTOR(
          bytecode_array_, MachineRepresentation::kTagged,
          Parameter(InterpreterDispatchDescriptor::kBytecodeArray)),
      VARIABLE_CONSTRUCTOR(
          bytecode_offset_, MachineType::PointerRepresentation(),
          Parameter(InterpreterDispatchDescriptor::kBytecodeOffset)),
      VARIABLE_CONSTRUCTOR(
          dispatch_table_, MachineType::PointerRepresentation(),
          Parameter(InterpreterDispatchDescriptor::kDispatchTable)),
      VARIABLE_CONSTRUCTOR(
          accumulator_, MachineRepresentation::kTagged,
          Parameter(InterpreterDispatchDescriptor::kAccumulator)),
      accumulator_use_(AccumulatorUse::kNone),
      made_call_(false),
      reloaded_frame_ptr_(false),
      bytecode_array_valid_(true),
      disable_stack_check_across_call_(false),
      stack_pointer_before_call_(nullptr) {
#ifdef V8_TRACE_IGNITION
  TraceBytecode(Runtime::kInterpreterTraceBytecodeEntry);
#endif
  RegisterCallGenerationCallbacks([this] { CallPrologue(); },
                                  [this] { CallEpilogue(); });

  // Save the bytecode offset immediately if bytecode will make a call along
  // the critical path, or it is a return bytecode.
  if (Bytecodes::MakesCallAlongCriticalPath(bytecode) ||
      Bytecodes::Returns(bytecode)) {
    SaveBytecodeOffset();
  }
}
InterpreterAssembler::~InterpreterAssembler() {
  // If the following check fails the handler does not use the accumulator in
  // the way described in the bytecode definitions in bytecodes.h.
  DCHECK_EQ(accumulator_use_, Bytecodes::GetAccumulatorUse(bytecode_));
  UnregisterCallGenerationCallbacks();
}
Node* InterpreterAssembler::GetInterpretedFramePointer() {
  if (!interpreted_frame_pointer_.IsBound()) {
    interpreted_frame_pointer_.Bind(LoadParentFramePointer());
  } else if (Bytecodes::MakesCallAlongCriticalPath(bytecode_) && made_call_ &&
             !reloaded_frame_ptr_) {
    interpreted_frame_pointer_.Bind(LoadParentFramePointer());
    reloaded_frame_ptr_ = true;
  }
  return interpreted_frame_pointer_.value();
}
Node* InterpreterAssembler::BytecodeOffset() {
  if (Bytecodes::MakesCallAlongCriticalPath(bytecode_) && made_call_ &&
      (bytecode_offset_.value() ==
       Parameter(InterpreterDispatchDescriptor::kBytecodeOffset))) {
    bytecode_offset_.Bind(ReloadBytecodeOffset());
  }
  return bytecode_offset_.value();
}
Node* InterpreterAssembler::ReloadBytecodeOffset() {
  Node* offset = LoadAndUntagRegister(Register::bytecode_offset());
  if (operand_scale() != OperandScale::kSingle) {
    // Add one to the offset such that it points to the actual bytecode rather
    // than the Wide / ExtraWide prefix bytecode.
    offset = IntPtrAdd(offset, IntPtrConstant(1));
  }
  return offset;
}
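// Note on the prefix adjustment above (illustrative, not from the original
// sources): a scaled bytecode is encoded as a prefix followed by the actual
// bytecode, e.g. [Wide][Ldar][16-bit reg operand]. The saved offset points at
// the prefix, so a reload advances by one byte so that operand decoding is
// relative to the actual bytecode; SaveBytecodeOffset below undoes the same
// adjustment before spilling.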
void InterpreterAssembler::SaveBytecodeOffset() {
  Node* offset = BytecodeOffset();
  if (operand_scale() != OperandScale::kSingle) {
    // Subtract one from the offset such that it points to the Wide / ExtraWide
    // prefix bytecode.
    offset = IntPtrSub(BytecodeOffset(), IntPtrConstant(1));
  }
  StoreAndTagRegister(offset, Register::bytecode_offset());
}
Node* InterpreterAssembler::BytecodeArrayTaggedPointer() {
  // Force a re-load of the bytecode array after every call in case the
  // debugger has been activated.
  if (!bytecode_array_valid_) {
    bytecode_array_.Bind(LoadRegister(Register::bytecode_array()));
    bytecode_array_valid_ = true;
  }
  return bytecode_array_.value();
}
Node* InterpreterAssembler::DispatchTableRawPointer() {
  if (Bytecodes::MakesCallAlongCriticalPath(bytecode_) && made_call_ &&
      (dispatch_table_.value() ==
       Parameter(InterpreterDispatchDescriptor::kDispatchTable))) {
    dispatch_table_.Bind(ExternalConstant(
        ExternalReference::interpreter_dispatch_table_address(isolate())));
  }
  return dispatch_table_.value();
}
Node* InterpreterAssembler::GetAccumulatorUnchecked() {
  return accumulator_.value();
}

Node* InterpreterAssembler::GetAccumulator() {
  DCHECK(Bytecodes::ReadsAccumulator(bytecode_));
  accumulator_use_ = accumulator_use_ | AccumulatorUse::kRead;
  return TaggedPoisonOnSpeculation(GetAccumulatorUnchecked());
}

void InterpreterAssembler::SetAccumulator(Node* value) {
  DCHECK(Bytecodes::WritesAccumulator(bytecode_));
  accumulator_use_ = accumulator_use_ | AccumulatorUse::kWrite;
  accumulator_.Bind(value);
}
Node* InterpreterAssembler::GetContext() {
  return LoadRegister(Register::current_context());
}

void InterpreterAssembler::SetContext(Node* value) {
  StoreRegister(value, Register::current_context());
}
Node* InterpreterAssembler::GetContextAtDepth(Node* context, Node* depth) {
  Variable cur_context(this, MachineRepresentation::kTaggedPointer);
  cur_context.Bind(context);

  Variable cur_depth(this, MachineRepresentation::kWord32);
  cur_depth.Bind(depth);

  Label context_found(this);

  Variable* context_search_loop_variables[2] = {&cur_depth, &cur_context};
  Label context_search(this, 2, context_search_loop_variables);

  // Fast path if the depth is 0.
  Branch(Word32Equal(depth, Int32Constant(0)), &context_found, &context_search);

  // Loop until the depth is 0.
  BIND(&context_search);
  {
    cur_depth.Bind(Int32Sub(cur_depth.value(), Int32Constant(1)));
    cur_context.Bind(
        LoadContextElement(cur_context.value(), Context::PREVIOUS_INDEX));

    Branch(Word32Equal(cur_depth.value(), Int32Constant(0)), &context_found,
           &context_search);
  }

  BIND(&context_found);
  return cur_context.value();
}
void InterpreterAssembler::GotoIfHasContextExtensionUpToDepth(Node* context,
                                                              Node* depth,
                                                              Label* target) {
  Variable cur_context(this, MachineRepresentation::kTaggedPointer);
  cur_context.Bind(context);

  Variable cur_depth(this, MachineRepresentation::kWord32);
  cur_depth.Bind(depth);

  Variable* context_search_loop_variables[2] = {&cur_depth, &cur_context};
  Label context_search(this, 2, context_search_loop_variables);

  // Loop until the depth is 0.
  Goto(&context_search);
  BIND(&context_search);
  {
    // TODO(leszeks): We only need to do this check if the context had a sloppy
    // eval, we could pass in a context chain bitmask to figure out which
    // contexts actually need to be checked.

    Node* extension_slot =
        LoadContextElement(cur_context.value(), Context::EXTENSION_INDEX);

    // Jump to the target if the extension slot is not a hole.
    GotoIf(WordNotEqual(extension_slot, TheHoleConstant()), target);

    cur_depth.Bind(Int32Sub(cur_depth.value(), Int32Constant(1)));
    cur_context.Bind(
        LoadContextElement(cur_context.value(), Context::PREVIOUS_INDEX));

    GotoIf(Word32NotEqual(cur_depth.value(), Int32Constant(0)),
           &context_search);
  }
}
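// Sketch of the walk performed above (informal summary): starting at
// |context|, the loop inspects the EXTENSION_INDEX slot of each context on
// the chain. A non-hole extension means dynamic bindings (e.g. from a sloppy
// eval) may shadow the slow path's assumptions, so control jumps to |target|;
// otherwise the loop follows PREVIOUS_INDEX until |depth| contexts have been
// examined.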
Node* InterpreterAssembler::RegisterLocation(Node* reg_index) {
  return WordPoisonOnSpeculation(
      IntPtrAdd(GetInterpretedFramePointer(), RegisterFrameOffset(reg_index)));
}

Node* InterpreterAssembler::RegisterLocation(Register reg) {
  return RegisterLocation(IntPtrConstant(reg.ToOperand()));
}

Node* InterpreterAssembler::RegisterFrameOffset(Node* index) {
  return TimesPointerSize(index);
}

Node* InterpreterAssembler::LoadRegister(Node* reg_index) {
  return Load(MachineType::AnyTagged(), GetInterpretedFramePointer(),
              RegisterFrameOffset(reg_index), LoadSensitivity::kCritical);
}

Node* InterpreterAssembler::LoadRegister(Register reg) {
  return Load(MachineType::AnyTagged(), GetInterpretedFramePointer(),
              IntPtrConstant(reg.ToOperand() << kPointerSizeLog2));
}

Node* InterpreterAssembler::LoadAndUntagRegister(Register reg) {
  return LoadAndUntagSmi(GetInterpretedFramePointer(),
                         reg.ToOperand() << kPointerSizeLog2);
}
Node* InterpreterAssembler::LoadRegisterAtOperandIndex(int operand_index) {
  return LoadRegister(
      BytecodeOperandReg(operand_index, LoadSensitivity::kSafe));
}

std::pair<Node*, Node*> InterpreterAssembler::LoadRegisterPairAtOperandIndex(
    int operand_index) {
  DCHECK_EQ(OperandType::kRegPair,
            Bytecodes::GetOperandType(bytecode_, operand_index));
  Node* first_reg_index =
      BytecodeOperandReg(operand_index, LoadSensitivity::kSafe);
  Node* second_reg_index = NextRegister(first_reg_index);
  return std::make_pair(LoadRegister(first_reg_index),
                        LoadRegister(second_reg_index));
}

InterpreterAssembler::RegListNodePair
InterpreterAssembler::GetRegisterListAtOperandIndex(int operand_index) {
  DCHECK(Bytecodes::IsRegisterListOperandType(
      Bytecodes::GetOperandType(bytecode_, operand_index)));
  DCHECK_EQ(OperandType::kRegCount,
            Bytecodes::GetOperandType(bytecode_, operand_index + 1));
  Node* base_reg = RegisterLocation(
      BytecodeOperandReg(operand_index, LoadSensitivity::kSafe));
  Node* reg_count = BytecodeOperandCount(operand_index + 1);
  return RegListNodePair(base_reg, reg_count);
}
Node* InterpreterAssembler::LoadRegisterFromRegisterList(
    const RegListNodePair& reg_list, int index) {
  Node* location = RegisterLocationInRegisterList(reg_list, index);
  // Location is already poisoned on speculation, so no need to poison here.
  return Load(MachineType::AnyTagged(), location);
}

Node* InterpreterAssembler::RegisterLocationInRegisterList(
    const RegListNodePair& reg_list, int index) {
  CSA_ASSERT(this,
             Uint32GreaterThan(reg_list.reg_count(), Int32Constant(index)));
  Node* offset = RegisterFrameOffset(IntPtrConstant(index));
  // Register indexes are negative, so subtract index from base location to get
  // location.
  return IntPtrSub(reg_list.base_reg_location(), offset);
}
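// Interpreter registers grow downwards in the frame, which is why list
// elements are reached by subtraction. A minimal sketch of the arithmetic
// (assuming 8-byte pointers, addresses invented for illustration): element i
// of a list based at address B lives at B - i * kPointerSize, so for
// B = fp - 24 and i = 2 the slot is fp - 40.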
void InterpreterAssembler::StoreRegister(Node* value, Register reg) {
  StoreNoWriteBarrier(
      MachineRepresentation::kTagged, GetInterpretedFramePointer(),
      IntPtrConstant(reg.ToOperand() << kPointerSizeLog2), value);
}

void InterpreterAssembler::StoreRegister(Node* value, Node* reg_index) {
  StoreNoWriteBarrier(MachineRepresentation::kTagged,
                      GetInterpretedFramePointer(),
                      RegisterFrameOffset(reg_index), value);
}

void InterpreterAssembler::StoreAndTagRegister(Node* value, Register reg) {
  int offset = reg.ToOperand() << kPointerSizeLog2;
  StoreAndTagSmi(GetInterpretedFramePointer(), offset, value);
}

void InterpreterAssembler::StoreRegisterAtOperandIndex(Node* value,
                                                       int operand_index) {
  StoreRegister(value,
                BytecodeOperandReg(operand_index, LoadSensitivity::kSafe));
}

void InterpreterAssembler::StoreRegisterPairAtOperandIndex(Node* value1,
                                                           Node* value2,
                                                           int operand_index) {
  DCHECK_EQ(OperandType::kRegOutPair,
            Bytecodes::GetOperandType(bytecode_, operand_index));
  Node* first_reg_index =
      BytecodeOperandReg(operand_index, LoadSensitivity::kSafe);
  StoreRegister(value1, first_reg_index);
  Node* second_reg_index = NextRegister(first_reg_index);
  StoreRegister(value2, second_reg_index);
}

void InterpreterAssembler::StoreRegisterTripleAtOperandIndex(
    Node* value1, Node* value2, Node* value3, int operand_index) {
  DCHECK_EQ(OperandType::kRegOutTriple,
            Bytecodes::GetOperandType(bytecode_, operand_index));
  Node* first_reg_index =
      BytecodeOperandReg(operand_index, LoadSensitivity::kSafe);
  StoreRegister(value1, first_reg_index);
  Node* second_reg_index = NextRegister(first_reg_index);
  StoreRegister(value2, second_reg_index);
  Node* third_reg_index = NextRegister(second_reg_index);
  StoreRegister(value3, third_reg_index);
}
Node* InterpreterAssembler::NextRegister(Node* reg_index) {
  // Register indexes are negative, so the next index is minus one.
  return IntPtrAdd(reg_index, IntPtrConstant(-1));
}

Node* InterpreterAssembler::OperandOffset(int operand_index) {
  return IntPtrConstant(
      Bytecodes::GetOperandOffset(bytecode_, operand_index, operand_scale()));
}
Node* InterpreterAssembler::BytecodeOperandUnsignedByte(
    int operand_index, LoadSensitivity needs_poisoning) {
  DCHECK_LT(operand_index, Bytecodes::NumberOfOperands(bytecode_));
  DCHECK_EQ(OperandSize::kByte, Bytecodes::GetOperandSize(
                                    bytecode_, operand_index, operand_scale()));
  Node* operand_offset = OperandOffset(operand_index);
  return Load(MachineType::Uint8(), BytecodeArrayTaggedPointer(),
              IntPtrAdd(BytecodeOffset(), operand_offset), needs_poisoning);
}

Node* InterpreterAssembler::BytecodeOperandSignedByte(
    int operand_index, LoadSensitivity needs_poisoning) {
  DCHECK_LT(operand_index, Bytecodes::NumberOfOperands(bytecode_));
  DCHECK_EQ(OperandSize::kByte, Bytecodes::GetOperandSize(
                                    bytecode_, operand_index, operand_scale()));
  Node* operand_offset = OperandOffset(operand_index);
  return Load(MachineType::Int8(), BytecodeArrayTaggedPointer(),
              IntPtrAdd(BytecodeOffset(), operand_offset), needs_poisoning);
}
Node* InterpreterAssembler::BytecodeOperandReadUnaligned(
    int relative_offset, MachineType result_type,
    LoadSensitivity needs_poisoning) {
  static const int kMaxCount = 4;
  DCHECK(!TargetSupportsUnalignedAccess());

  int count;
  switch (result_type.representation()) {
    case MachineRepresentation::kWord16:
      count = 2;
      break;
    case MachineRepresentation::kWord32:
      count = 4;
      break;
    default:
      UNREACHABLE();
      break;
  }
  MachineType msb_type =
      result_type.IsSigned() ? MachineType::Int8() : MachineType::Uint8();

#if V8_TARGET_LITTLE_ENDIAN
  const int kStep = -1;
  int msb_offset = count - 1;
#elif V8_TARGET_BIG_ENDIAN
  const int kStep = 1;
  int msb_offset = 0;
#else
#error "Unknown Architecture"
#endif

  // Read the most signficant bytecode into bytes[0] and then in order
  // down to least significant in bytes[count - 1].
  DCHECK_LE(count, kMaxCount);
  Node* bytes[kMaxCount];
  for (int i = 0; i < count; i++) {
    MachineType machine_type = (i == 0) ? msb_type : MachineType::Uint8();
    Node* offset = IntPtrConstant(relative_offset + msb_offset + i * kStep);
    Node* array_offset = IntPtrAdd(BytecodeOffset(), offset);
    bytes[i] = Load(machine_type, BytecodeArrayTaggedPointer(), array_offset,
                    needs_poisoning);
  }

  // Pack LSB to MSB.
  Node* result = bytes[--count];
  for (int i = 1; --count >= 0; i++) {
    Node* shift = Int32Constant(i * kBitsPerByte);
    Node* value = Word32Shl(bytes[count], shift);
    result = Word32Or(value, result);
  }
  return result;
}
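// Worked example of the packing loop above (values invented for
// illustration): reading a 16-bit little-endian operand stored as
// {0x34, 0x12} loads bytes[0] = 0x12 (the MSB, at msb_offset = 1) and
// bytes[1] = 0x34, then combines them as (0x12 << 8) | 0x34 = 0x1234.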
Node* InterpreterAssembler::BytecodeOperandUnsignedShort(
    int operand_index, LoadSensitivity needs_poisoning) {
  DCHECK_LT(operand_index, Bytecodes::NumberOfOperands(bytecode_));
  DCHECK_EQ(
      OperandSize::kShort,
      Bytecodes::GetOperandSize(bytecode_, operand_index, operand_scale()));
  int operand_offset =
      Bytecodes::GetOperandOffset(bytecode_, operand_index, operand_scale());
  if (TargetSupportsUnalignedAccess()) {
    return Load(MachineType::Uint16(), BytecodeArrayTaggedPointer(),
                IntPtrAdd(BytecodeOffset(), IntPtrConstant(operand_offset)),
                needs_poisoning);
  } else {
    return BytecodeOperandReadUnaligned(operand_offset, MachineType::Uint16(),
                                        needs_poisoning);
  }
}

Node* InterpreterAssembler::BytecodeOperandSignedShort(
    int operand_index, LoadSensitivity needs_poisoning) {
  DCHECK_LT(operand_index, Bytecodes::NumberOfOperands(bytecode_));
  DCHECK_EQ(
      OperandSize::kShort,
      Bytecodes::GetOperandSize(bytecode_, operand_index, operand_scale()));
  int operand_offset =
      Bytecodes::GetOperandOffset(bytecode_, operand_index, operand_scale());
  if (TargetSupportsUnalignedAccess()) {
    return Load(MachineType::Int16(), BytecodeArrayTaggedPointer(),
                IntPtrAdd(BytecodeOffset(), IntPtrConstant(operand_offset)),
                needs_poisoning);
  } else {
    return BytecodeOperandReadUnaligned(operand_offset, MachineType::Int16(),
                                        needs_poisoning);
  }
}

Node* InterpreterAssembler::BytecodeOperandUnsignedQuad(
    int operand_index, LoadSensitivity needs_poisoning) {
  DCHECK_LT(operand_index, Bytecodes::NumberOfOperands(bytecode_));
  DCHECK_EQ(OperandSize::kQuad, Bytecodes::GetOperandSize(
                                    bytecode_, operand_index, operand_scale()));
  int operand_offset =
      Bytecodes::GetOperandOffset(bytecode_, operand_index, operand_scale());
  if (TargetSupportsUnalignedAccess()) {
    return Load(MachineType::Uint32(), BytecodeArrayTaggedPointer(),
                IntPtrAdd(BytecodeOffset(), IntPtrConstant(operand_offset)),
                needs_poisoning);
  } else {
    return BytecodeOperandReadUnaligned(operand_offset, MachineType::Uint32(),
                                        needs_poisoning);
  }
}

Node* InterpreterAssembler::BytecodeOperandSignedQuad(
    int operand_index, LoadSensitivity needs_poisoning) {
  DCHECK_LT(operand_index, Bytecodes::NumberOfOperands(bytecode_));
  DCHECK_EQ(OperandSize::kQuad, Bytecodes::GetOperandSize(
                                    bytecode_, operand_index, operand_scale()));
  int operand_offset =
      Bytecodes::GetOperandOffset(bytecode_, operand_index, operand_scale());
  if (TargetSupportsUnalignedAccess()) {
    return Load(MachineType::Int32(), BytecodeArrayTaggedPointer(),
                IntPtrAdd(BytecodeOffset(), IntPtrConstant(operand_offset)),
                needs_poisoning);
  } else {
    return BytecodeOperandReadUnaligned(operand_offset, MachineType::Int32(),
                                        needs_poisoning);
  }
}
Node* InterpreterAssembler::BytecodeSignedOperand(
    int operand_index, OperandSize operand_size,
    LoadSensitivity needs_poisoning) {
  DCHECK(!Bytecodes::IsUnsignedOperandType(
      Bytecodes::GetOperandType(bytecode_, operand_index)));
  switch (operand_size) {
    case OperandSize::kByte:
      return BytecodeOperandSignedByte(operand_index, needs_poisoning);
    case OperandSize::kShort:
      return BytecodeOperandSignedShort(operand_index, needs_poisoning);
    case OperandSize::kQuad:
      return BytecodeOperandSignedQuad(operand_index, needs_poisoning);
    case OperandSize::kNone:
      UNREACHABLE();
  }
  return nullptr;
}

Node* InterpreterAssembler::BytecodeUnsignedOperand(
    int operand_index, OperandSize operand_size,
    LoadSensitivity needs_poisoning) {
  DCHECK(Bytecodes::IsUnsignedOperandType(
      Bytecodes::GetOperandType(bytecode_, operand_index)));
  switch (operand_size) {
    case OperandSize::kByte:
      return BytecodeOperandUnsignedByte(operand_index, needs_poisoning);
    case OperandSize::kShort:
      return BytecodeOperandUnsignedShort(operand_index, needs_poisoning);
    case OperandSize::kQuad:
      return BytecodeOperandUnsignedQuad(operand_index, needs_poisoning);
    case OperandSize::kNone:
      UNREACHABLE();
  }
  return nullptr;
}
Node* InterpreterAssembler::BytecodeOperandCount(int operand_index) {
  DCHECK_EQ(OperandType::kRegCount,
            Bytecodes::GetOperandType(bytecode_, operand_index));
  OperandSize operand_size =
      Bytecodes::GetOperandSize(bytecode_, operand_index, operand_scale());
  return BytecodeUnsignedOperand(operand_index, operand_size);
}

Node* InterpreterAssembler::BytecodeOperandFlag(int operand_index) {
  DCHECK_EQ(OperandType::kFlag8,
            Bytecodes::GetOperandType(bytecode_, operand_index));
  OperandSize operand_size =
      Bytecodes::GetOperandSize(bytecode_, operand_index, operand_scale());
  DCHECK_EQ(operand_size, OperandSize::kByte);
  return BytecodeUnsignedOperand(operand_index, operand_size);
}

Node* InterpreterAssembler::BytecodeOperandUImm(int operand_index) {
  DCHECK_EQ(OperandType::kUImm,
            Bytecodes::GetOperandType(bytecode_, operand_index));
  OperandSize operand_size =
      Bytecodes::GetOperandSize(bytecode_, operand_index, operand_scale());
  return BytecodeUnsignedOperand(operand_index, operand_size);
}

Node* InterpreterAssembler::BytecodeOperandUImmWord(int operand_index) {
  return ChangeUint32ToWord(BytecodeOperandUImm(operand_index));
}

Node* InterpreterAssembler::BytecodeOperandUImmSmi(int operand_index) {
  return SmiFromInt32(BytecodeOperandUImm(operand_index));
}

Node* InterpreterAssembler::BytecodeOperandImm(int operand_index) {
  DCHECK_EQ(OperandType::kImm,
            Bytecodes::GetOperandType(bytecode_, operand_index));
  OperandSize operand_size =
      Bytecodes::GetOperandSize(bytecode_, operand_index, operand_scale());
  return BytecodeSignedOperand(operand_index, operand_size);
}

Node* InterpreterAssembler::BytecodeOperandImmIntPtr(int operand_index) {
  return ChangeInt32ToIntPtr(BytecodeOperandImm(operand_index));
}

Node* InterpreterAssembler::BytecodeOperandImmSmi(int operand_index) {
  return SmiFromInt32(BytecodeOperandImm(operand_index));
}

Node* InterpreterAssembler::BytecodeOperandIdxInt32(int operand_index) {
  DCHECK_EQ(OperandType::kIdx,
            Bytecodes::GetOperandType(bytecode_, operand_index));
  OperandSize operand_size =
      Bytecodes::GetOperandSize(bytecode_, operand_index, operand_scale());
  return BytecodeUnsignedOperand(operand_index, operand_size);
}

Node* InterpreterAssembler::BytecodeOperandIdx(int operand_index) {
  return ChangeUint32ToWord(BytecodeOperandIdxInt32(operand_index));
}

Node* InterpreterAssembler::BytecodeOperandIdxSmi(int operand_index) {
  return SmiTag(BytecodeOperandIdx(operand_index));
}
Node* InterpreterAssembler::BytecodeOperandConstantPoolIdx(
    int operand_index, LoadSensitivity needs_poisoning) {
  DCHECK_EQ(OperandType::kIdx,
            Bytecodes::GetOperandType(bytecode_, operand_index));
  OperandSize operand_size =
      Bytecodes::GetOperandSize(bytecode_, operand_index, operand_scale());
  return ChangeUint32ToWord(
      BytecodeUnsignedOperand(operand_index, operand_size, needs_poisoning));
}

Node* InterpreterAssembler::BytecodeOperandReg(
    int operand_index, LoadSensitivity needs_poisoning) {
  DCHECK(Bytecodes::IsRegisterOperandType(
      Bytecodes::GetOperandType(bytecode_, operand_index)));
  OperandSize operand_size =
      Bytecodes::GetOperandSize(bytecode_, operand_index, operand_scale());
  return ChangeInt32ToIntPtr(
      BytecodeSignedOperand(operand_index, operand_size, needs_poisoning));
}
Node* InterpreterAssembler::BytecodeOperandRuntimeId(int operand_index) {
  DCHECK_EQ(OperandType::kRuntimeId,
            Bytecodes::GetOperandType(bytecode_, operand_index));
  OperandSize operand_size =
      Bytecodes::GetOperandSize(bytecode_, operand_index, operand_scale());
  DCHECK_EQ(operand_size, OperandSize::kShort);
  return BytecodeUnsignedOperand(operand_index, operand_size);
}

Node* InterpreterAssembler::BytecodeOperandNativeContextIndex(
    int operand_index) {
  DCHECK_EQ(OperandType::kNativeContextIndex,
            Bytecodes::GetOperandType(bytecode_, operand_index));
  OperandSize operand_size =
      Bytecodes::GetOperandSize(bytecode_, operand_index, operand_scale());
  return ChangeUint32ToWord(
      BytecodeUnsignedOperand(operand_index, operand_size));
}

Node* InterpreterAssembler::BytecodeOperandIntrinsicId(int operand_index) {
  DCHECK_EQ(OperandType::kIntrinsicId,
            Bytecodes::GetOperandType(bytecode_, operand_index));
  OperandSize operand_size =
      Bytecodes::GetOperandSize(bytecode_, operand_index, operand_scale());
  DCHECK_EQ(operand_size, OperandSize::kByte);
  return BytecodeUnsignedOperand(operand_index, operand_size);
}
Node* InterpreterAssembler::LoadConstantPoolEntry(Node* index) {
  TNode<FixedArray> constant_pool = CAST(LoadObjectField(
      BytecodeArrayTaggedPointer(), BytecodeArray::kConstantPoolOffset));
  return LoadFixedArrayElement(constant_pool, UncheckedCast<IntPtrT>(index),
                               LoadSensitivity::kCritical);
}

Node* InterpreterAssembler::LoadAndUntagConstantPoolEntry(Node* index) {
  return SmiUntag(LoadConstantPoolEntry(index));
}

Node* InterpreterAssembler::LoadConstantPoolEntryAtOperandIndex(
    int operand_index) {
  Node* index =
      BytecodeOperandConstantPoolIdx(operand_index, LoadSensitivity::kSafe);
  return LoadConstantPoolEntry(index);
}

Node* InterpreterAssembler::LoadAndUntagConstantPoolEntryAtOperandIndex(
    int operand_index) {
  return SmiUntag(LoadConstantPoolEntryAtOperandIndex(operand_index));
}
TNode<FeedbackVector> InterpreterAssembler::LoadFeedbackVector() {
  TNode<JSFunction> function = CAST(LoadRegister(Register::function_closure()));
  return CodeStubAssembler::LoadFeedbackVector(function);
}

Node* InterpreterAssembler::LoadFeedbackVectorUnchecked() {
  TNode<JSFunction> function = CAST(LoadRegister(Register::function_closure()));
  return CodeStubAssembler::LoadFeedbackVectorUnchecked(function);
}
void InterpreterAssembler::CallPrologue() {
  if (!Bytecodes::MakesCallAlongCriticalPath(bytecode_)) {
    // Bytecodes that make a call along the critical path save the bytecode
    // offset in the bytecode handler's prologue. For other bytecodes, if
    // there are multiple calls in the bytecode handler, you need to spill
    // before each of them, unless SaveBytecodeOffset has explicitly been
    // called in a path that dominates _all_ of those calls (which we don't
    // track).
    SaveBytecodeOffset();
  }

  if (FLAG_debug_code && !disable_stack_check_across_call_) {
    DCHECK_NULL(stack_pointer_before_call_);
    stack_pointer_before_call_ = LoadStackPointer();
  }
  bytecode_array_valid_ = false;
  made_call_ = true;
}

void InterpreterAssembler::CallEpilogue() {
  if (FLAG_debug_code && !disable_stack_check_across_call_) {
    Node* stack_pointer_after_call = LoadStackPointer();
    Node* stack_pointer_before_call = stack_pointer_before_call_;
    stack_pointer_before_call_ = nullptr;
    AbortIfWordNotEqual(stack_pointer_before_call, stack_pointer_after_call,
                        AbortReason::kUnexpectedStackPointer);
  }
}
void InterpreterAssembler::IncrementCallCount(Node* feedback_vector,
                                              Node* slot_id) {
  Comment("increment call count");
  TNode<Smi> call_count =
      CAST(LoadFeedbackVectorSlot(feedback_vector, slot_id, kPointerSize));
  // The lowest {FeedbackNexus::CallCountField::kShift} bits of the call
  // count are used as flags. To increment the call count by one we hence
  // have to increment by 1 << FeedbackNexus::CallCountField::kShift.
  Node* new_count = SmiAdd(
      call_count, SmiConstant(1 << FeedbackNexus::CallCountField::kShift));
  // Count is Smi, so we don't need a write barrier.
  StoreFeedbackVectorSlot(feedback_vector, slot_id, new_count,
                          SKIP_WRITE_BARRIER, kPointerSize);
}
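// Illustrative arithmetic for the increment above (assuming, purely for the
// example, CallCountField::kShift == 1): the slot holds flags in the low
// shifted-out bit, so adding SmiConstant(1 << 1) bumps the logical call count
// by exactly one while leaving the flag bit untouched.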
void InterpreterAssembler::CollectCallableFeedback(Node* target, Node* context,
                                                   Node* feedback_vector,
                                                   Node* slot_id) {
  Label extra_checks(this, Label::kDeferred), done(this);

  // Check if we have monomorphic {target} feedback already.
  TNode<MaybeObject> feedback =
      LoadFeedbackVectorSlot(feedback_vector, slot_id);
  Comment("check if monomorphic");
  TNode<BoolT> is_monomorphic = IsWeakReferenceTo(feedback, CAST(target));
  GotoIf(is_monomorphic, &done);

  // Check if it is a megamorphic {target}.
  Comment("check if megamorphic");
  Node* is_megamorphic = WordEqual(
      feedback, HeapConstant(FeedbackVector::MegamorphicSentinel(isolate())));
  Branch(is_megamorphic, &done, &extra_checks);

  BIND(&extra_checks);
  {
    Label initialize(this), mark_megamorphic(this);

    Comment("check if weak reference");
    Node* is_uninitialized = WordEqual(
        feedback,
        HeapConstant(FeedbackVector::UninitializedSentinel(isolate())));
    GotoIf(is_uninitialized, &initialize);
    CSA_ASSERT(this, IsWeakOrCleared(feedback));

    // If the weak reference is cleared, we have a new chance to become
    // monomorphic.
    Comment("check if weak reference is cleared");
    Branch(IsCleared(feedback), &initialize, &mark_megamorphic);

    BIND(&initialize);
    {
      // Check if {target} is a JSFunction or JSBoundFunction in the current
      // native context.
      Comment("check if function in same native context");
      GotoIf(TaggedIsSmi(target), &mark_megamorphic);
      VARIABLE(var_current, MachineRepresentation::kTagged, target);
      Label loop(this, &var_current), done_loop(this);
      Goto(&loop);
      BIND(&loop);
      {
        Label if_boundfunction(this), if_function(this);
        Node* current = var_current.value();
        CSA_ASSERT(this, TaggedIsNotSmi(current));
        Node* current_instance_type = LoadInstanceType(current);
        GotoIf(InstanceTypeEqual(current_instance_type, JS_BOUND_FUNCTION_TYPE),
               &if_boundfunction);
        Branch(InstanceTypeEqual(current_instance_type, JS_FUNCTION_TYPE),
               &if_function, &mark_megamorphic);

        BIND(&if_function);
        {
          // Check that the JSFunction {current} is in the current native
          // context.
          Node* current_context =
              LoadObjectField(current, JSFunction::kContextOffset);
          Node* current_native_context = LoadNativeContext(current_context);
          Branch(WordEqual(LoadNativeContext(context), current_native_context),
                 &done_loop, &mark_megamorphic);
        }

        BIND(&if_boundfunction);
        {
          // Continue with the [[BoundTargetFunction]] of {current}.
          var_current.Bind(LoadObjectField(
              current, JSBoundFunction::kBoundTargetFunctionOffset));
          Goto(&loop);
        }
      }
      BIND(&done_loop);
      StoreWeakReferenceInFeedbackVector(feedback_vector, slot_id,
                                         CAST(target));
      ReportFeedbackUpdate(feedback_vector, slot_id, "Call:Initialize");
      Goto(&done);
    }

    BIND(&mark_megamorphic);
    {
      // MegamorphicSentinel is an immortal immovable object so
      // write-barrier is not needed.
      Comment("transition to megamorphic");
      DCHECK(RootsTable::IsImmortalImmovable(RootIndex::kmegamorphic_symbol));
      StoreFeedbackVectorSlot(
          feedback_vector, slot_id,
          HeapConstant(FeedbackVector::MegamorphicSentinel(isolate())),
          SKIP_WRITE_BARRIER);
      ReportFeedbackUpdate(feedback_vector, slot_id,
                           "Call:TransitionMegamorphic");
      Goto(&done);
    }
  }

  BIND(&done);
}
void InterpreterAssembler::CollectCallFeedback(Node* target, Node* context,
                                               Node* maybe_feedback_vector,
                                               Node* slot_id) {
  Label feedback_done(this);
  // If {maybe_feedback_vector} is undefined, there is nothing to do.
  GotoIf(IsUndefined(maybe_feedback_vector), &feedback_done);

  CSA_SLOW_ASSERT(this, IsFeedbackVector(maybe_feedback_vector));

  // Increment the call count.
  IncrementCallCount(maybe_feedback_vector, slot_id);

  // Collect the callable {target} feedback.
  CollectCallableFeedback(target, context, maybe_feedback_vector, slot_id);
  Goto(&feedback_done);

  BIND(&feedback_done);
}
void InterpreterAssembler::CallJSAndDispatch(
    Node* function, Node* context, const RegListNodePair& args,
    ConvertReceiverMode receiver_mode) {
  DCHECK(Bytecodes::MakesCallAlongCriticalPath(bytecode_));
  DCHECK(Bytecodes::IsCallOrConstruct(bytecode_) ||
         bytecode_ == Bytecode::kInvokeIntrinsic);
  DCHECK_EQ(Bytecodes::GetReceiverMode(bytecode_), receiver_mode);

  Node* args_count;
  if (receiver_mode == ConvertReceiverMode::kNullOrUndefined) {
    // The receiver is implied, so it is not in the argument list.
    args_count = args.reg_count();
  } else {
    // Subtract the receiver from the argument count.
    Node* receiver_count = Int32Constant(1);
    args_count = Int32Sub(args.reg_count(), receiver_count);
  }

  Callable callable = CodeFactory::InterpreterPushArgsThenCall(
      isolate(), receiver_mode, InterpreterPushArgsMode::kOther);
  Node* code_target = HeapConstant(callable.code());

  TailCallStubThenBytecodeDispatch(callable.descriptor(), code_target, context,
                                   args_count, args.base_reg_location(),
                                   function);
  // TailCallStubThenDispatch updates accumulator with result.
  accumulator_use_ = accumulator_use_ | AccumulatorUse::kWrite;
}
template <class... TArgs>
void InterpreterAssembler::CallJSAndDispatch(Node* function, Node* context,
                                             Node* arg_count,
                                             ConvertReceiverMode receiver_mode,
                                             TArgs... args) {
  DCHECK(Bytecodes::MakesCallAlongCriticalPath(bytecode_));
  DCHECK(Bytecodes::IsCallOrConstruct(bytecode_) ||
         bytecode_ == Bytecode::kInvokeIntrinsic);
  DCHECK_EQ(Bytecodes::GetReceiverMode(bytecode_), receiver_mode);
  Callable callable = CodeFactory::Call(isolate());
  Node* code_target = HeapConstant(callable.code());

  if (receiver_mode == ConvertReceiverMode::kNullOrUndefined) {
    // The first argument on the stack is the receiver; pass undefined.
    TailCallStubThenBytecodeDispatch(
        callable.descriptor(), code_target, context, function, arg_count,
        static_cast<Node*>(UndefinedConstant()), args...);
  } else {
    TailCallStubThenBytecodeDispatch(callable.descriptor(), code_target,
                                     context, function, arg_count, args...);
  }
  // TailCallStubThenDispatch updates accumulator with result.
  accumulator_use_ = accumulator_use_ | AccumulatorUse::kWrite;
}
// Instantiate CallJSAndDispatch() for argument counts used by interpreter
// intrinsics.
template V8_EXPORT_PRIVATE void InterpreterAssembler::CallJSAndDispatch(
    Node* function, Node* context, Node* arg_count,
    ConvertReceiverMode receiver_mode);
template V8_EXPORT_PRIVATE void InterpreterAssembler::CallJSAndDispatch(
    Node* function, Node* context, Node* arg_count,
    ConvertReceiverMode receiver_mode, Node*);
template V8_EXPORT_PRIVATE void InterpreterAssembler::CallJSAndDispatch(
    Node* function, Node* context, Node* arg_count,
    ConvertReceiverMode receiver_mode, Node*, Node*);
template V8_EXPORT_PRIVATE void InterpreterAssembler::CallJSAndDispatch(
    Node* function, Node* context, Node* arg_count,
    ConvertReceiverMode receiver_mode, Node*, Node*, Node*);
void InterpreterAssembler::CallJSWithSpreadAndDispatch(
    Node* function, Node* context, const RegListNodePair& args, Node* slot_id,
    Node* maybe_feedback_vector) {
  DCHECK(Bytecodes::MakesCallAlongCriticalPath(bytecode_));
  DCHECK_EQ(Bytecodes::GetReceiverMode(bytecode_), ConvertReceiverMode::kAny);
  CollectCallFeedback(function, context, maybe_feedback_vector, slot_id);
  Comment("call using CallWithSpread builtin");
  Callable callable = CodeFactory::InterpreterPushArgsThenCall(
      isolate(), ConvertReceiverMode::kAny,
      InterpreterPushArgsMode::kWithFinalSpread);
  Node* code_target = HeapConstant(callable.code());

  Node* receiver_count = Int32Constant(1);
  Node* args_count = Int32Sub(args.reg_count(), receiver_count);
  TailCallStubThenBytecodeDispatch(callable.descriptor(), code_target, context,
                                   args_count, args.base_reg_location(),
                                   function);
  // TailCallStubThenDispatch updates accumulator with result.
  accumulator_use_ = accumulator_use_ | AccumulatorUse::kWrite;
}
Node* InterpreterAssembler::Construct(Node* target, Node* context,
                                      Node* new_target,
                                      const RegListNodePair& args,
                                      Node* slot_id, Node* feedback_vector) {
  DCHECK(Bytecodes::MakesCallAlongCriticalPath(bytecode_));
  VARIABLE(var_result, MachineRepresentation::kTagged);
  VARIABLE(var_site, MachineRepresentation::kTagged);
  Label extra_checks(this, Label::kDeferred), return_result(this, &var_result),
      construct(this), construct_array(this, &var_site);

  // Increment the call count.
  IncrementCallCount(feedback_vector, slot_id);

  // Check if we have monomorphic {new_target} feedback already.
  TNode<MaybeObject> feedback =
      LoadFeedbackVectorSlot(feedback_vector, slot_id);
  Branch(IsWeakReferenceTo(feedback, CAST(new_target)), &construct,
         &extra_checks);

  BIND(&extra_checks);
  {
    Label check_allocation_site(this), check_initialized(this),
        initialize(this), mark_megamorphic(this);

    // Check if it is a megamorphic {new_target}.
    Comment("check if megamorphic");
    Node* is_megamorphic = WordEqual(
        feedback, HeapConstant(FeedbackVector::MegamorphicSentinel(isolate())));
    GotoIf(is_megamorphic, &construct);

    Comment("check if weak reference");
    GotoIfNot(IsWeakOrCleared(feedback), &check_allocation_site);

    // If the weak reference is cleared, we have a new chance to become
    // monomorphic.
    Comment("check if weak reference is cleared");
    Branch(IsCleared(feedback), &initialize, &mark_megamorphic);

    BIND(&check_allocation_site);
    {
      // Check if it is an AllocationSite.
      Comment("check if allocation site");
      TNode<HeapObject> strong_feedback = CAST(feedback);
      GotoIfNot(IsAllocationSite(strong_feedback), &check_initialized);

      // Make sure that {target} and {new_target} are the Array constructor.
      Node* array_function = LoadContextElement(LoadNativeContext(context),
                                                Context::ARRAY_FUNCTION_INDEX);
      GotoIfNot(WordEqual(target, array_function), &mark_megamorphic);
      GotoIfNot(WordEqual(new_target, array_function), &mark_megamorphic);
      var_site.Bind(strong_feedback);
      Goto(&construct_array);
    }

    BIND(&check_initialized);
    {
      // Check if it is uninitialized.
      Comment("check if uninitialized");
      Node* is_uninitialized =
          WordEqual(feedback, LoadRoot(RootIndex::kuninitialized_symbol));
      Branch(is_uninitialized, &initialize, &mark_megamorphic);
    }

    BIND(&initialize);
    {
      Comment("check if function in same native context");
      GotoIf(TaggedIsSmi(new_target), &mark_megamorphic);
      // Check if the {new_target} is a JSFunction or JSBoundFunction
      // in the current native context.
      VARIABLE(var_current, MachineRepresentation::kTagged, new_target);
      Label loop(this, &var_current), done_loop(this);
      Goto(&loop);
      BIND(&loop);
      {
        Label if_boundfunction(this), if_function(this);
        Node* current = var_current.value();
        CSA_ASSERT(this, TaggedIsNotSmi(current));
        Node* current_instance_type = LoadInstanceType(current);
        GotoIf(InstanceTypeEqual(current_instance_type, JS_BOUND_FUNCTION_TYPE),
               &if_boundfunction);
        Branch(InstanceTypeEqual(current_instance_type, JS_FUNCTION_TYPE),
               &if_function, &mark_megamorphic);

        BIND(&if_function);
        {
          // Check that the JSFunction {current} is in the current native
          // context.
          Node* current_context =
              LoadObjectField(current, JSFunction::kContextOffset);
          Node* current_native_context = LoadNativeContext(current_context);
          Branch(WordEqual(LoadNativeContext(context), current_native_context),
                 &done_loop, &mark_megamorphic);
        }

        BIND(&if_boundfunction);
        {
          // Continue with the [[BoundTargetFunction]] of {current}.
          var_current.Bind(LoadObjectField(
              current, JSBoundFunction::kBoundTargetFunctionOffset));
          Goto(&loop);
        }
      }
      BIND(&done_loop);

      // Create an AllocationSite if {target} and {new_target} refer
      // to the current native context's Array constructor.
      Label create_allocation_site(this), store_weak_reference(this);
      GotoIfNot(WordEqual(target, new_target), &store_weak_reference);
      Node* array_function = LoadContextElement(LoadNativeContext(context),
                                                Context::ARRAY_FUNCTION_INDEX);
      Branch(WordEqual(target, array_function), &create_allocation_site,
             &store_weak_reference);

      BIND(&create_allocation_site);
      {
        var_site.Bind(CreateAllocationSiteInFeedbackVector(feedback_vector,
                                                           SmiTag(slot_id)));
        ReportFeedbackUpdate(feedback_vector, slot_id,
                             "Construct:CreateAllocationSite");
        Goto(&construct_array);
      }

      BIND(&store_weak_reference);
      {
        StoreWeakReferenceInFeedbackVector(feedback_vector, slot_id,
                                           CAST(new_target));
        ReportFeedbackUpdate(feedback_vector, slot_id,
                             "Construct:StoreWeakReference");
        Goto(&construct);
      }
    }

    BIND(&mark_megamorphic);
    {
      // MegamorphicSentinel is an immortal immovable object so
      // write-barrier is not needed.
      Comment("transition to megamorphic");
      DCHECK(RootsTable::IsImmortalImmovable(RootIndex::kmegamorphic_symbol));
      StoreFeedbackVectorSlot(
          feedback_vector, slot_id,
          HeapConstant(FeedbackVector::MegamorphicSentinel(isolate())),
          SKIP_WRITE_BARRIER);
      ReportFeedbackUpdate(feedback_vector, slot_id,
                           "Construct:TransitionMegamorphic");
      Goto(&construct);
    }
  }

  BIND(&construct_array);
  {
    // Construct with an AllocationSite for the Array constructor.
    Comment("call using ConstructArray builtin");
    Callable callable = CodeFactory::InterpreterPushArgsThenConstruct(
        isolate(), InterpreterPushArgsMode::kArrayFunction);
    Node* code_target = HeapConstant(callable.code());
    var_result.Bind(CallStub(callable.descriptor(), code_target, context,
                             args.reg_count(), args.base_reg_location(), target,
                             new_target, var_site.value()));
    Goto(&return_result);
  }

  BIND(&construct);
  {
    // Construct without an AllocationSite.
    Comment("call using Construct builtin");
    Callable callable = CodeFactory::InterpreterPushArgsThenConstruct(
        isolate(), InterpreterPushArgsMode::kOther);
    Node* code_target = HeapConstant(callable.code());
    var_result.Bind(CallStub(callable.descriptor(), code_target, context,
                             args.reg_count(), args.base_reg_location(), target,
                             new_target, UndefinedConstant()));
    Goto(&return_result);
  }

  BIND(&return_result);
  return var_result.value();
}
Node* InterpreterAssembler::ConstructWithSpread(Node* target, Node* context,
                                                Node* new_target,
                                                const RegListNodePair& args,
                                                Node* slot_id,
                                                Node* feedback_vector) {
  // TODO(bmeurer): Unify this with the Construct bytecode feedback
  // above once we have a way to pass the AllocationSite to the Array
  // constructor _and_ spread the last argument at the same time.
  DCHECK(Bytecodes::MakesCallAlongCriticalPath(bytecode_));
  Label extra_checks(this, Label::kDeferred), construct(this);

  // Increment the call count.
  IncrementCallCount(feedback_vector, slot_id);

  // Check if we have monomorphic {new_target} feedback already.
  TNode<MaybeObject> feedback =
      LoadFeedbackVectorSlot(feedback_vector, slot_id);
  Branch(IsWeakReferenceTo(feedback, CAST(new_target)), &construct,
         &extra_checks);

  BIND(&extra_checks);
  {
    Label check_initialized(this), initialize(this), mark_megamorphic(this);

    // Check if it is a megamorphic {new_target}.
    Comment("check if megamorphic");
    Node* is_megamorphic = WordEqual(
        feedback, HeapConstant(FeedbackVector::MegamorphicSentinel(isolate())));
    GotoIf(is_megamorphic, &construct);

    Comment("check if weak reference");
    GotoIfNot(IsWeakOrCleared(feedback), &check_initialized);

    // If the weak reference is cleared, we have a new chance to become
    // monomorphic.
    Comment("check if weak reference is cleared");
    Branch(IsCleared(feedback), &initialize, &mark_megamorphic);

    BIND(&check_initialized);
    {
      // Check if it is uninitialized.
      Comment("check if uninitialized");
      Node* is_uninitialized =
          WordEqual(feedback, LoadRoot(RootIndex::kuninitialized_symbol));
      Branch(is_uninitialized, &initialize, &mark_megamorphic);
    }

    BIND(&initialize);
    {
      Comment("check if function in same native context");
      GotoIf(TaggedIsSmi(new_target), &mark_megamorphic);
      // Check if the {new_target} is a JSFunction or JSBoundFunction
      // in the current native context.
      VARIABLE(var_current, MachineRepresentation::kTagged, new_target);
      Label loop(this, &var_current), done_loop(this);
      Goto(&loop);
      BIND(&loop);
      {
        Label if_boundfunction(this), if_function(this);
        Node* current = var_current.value();
        CSA_ASSERT(this, TaggedIsNotSmi(current));
        Node* current_instance_type = LoadInstanceType(current);
        GotoIf(InstanceTypeEqual(current_instance_type, JS_BOUND_FUNCTION_TYPE),
               &if_boundfunction);
        Branch(InstanceTypeEqual(current_instance_type, JS_FUNCTION_TYPE),
               &if_function, &mark_megamorphic);

        BIND(&if_function);
        {
          // Check that the JSFunction {current} is in the current native
          // context.
          Node* current_context =
              LoadObjectField(current, JSFunction::kContextOffset);
          Node* current_native_context = LoadNativeContext(current_context);
          Branch(WordEqual(LoadNativeContext(context), current_native_context),
                 &done_loop, &mark_megamorphic);
        }

        BIND(&if_boundfunction);
        {
          // Continue with the [[BoundTargetFunction]] of {current}.
          var_current.Bind(LoadObjectField(
              current, JSBoundFunction::kBoundTargetFunctionOffset));
          Goto(&loop);
        }
      }
      BIND(&done_loop);
      StoreWeakReferenceInFeedbackVector(feedback_vector, slot_id,
                                         CAST(new_target));
      ReportFeedbackUpdate(feedback_vector, slot_id,
                           "ConstructWithSpread:Initialize");
      Goto(&construct);
    }

    BIND(&mark_megamorphic);
    {
      // MegamorphicSentinel is an immortal immovable object so
      // write-barrier is not needed.
      Comment("transition to megamorphic");
      DCHECK(RootsTable::IsImmortalImmovable(RootIndex::kmegamorphic_symbol));
      StoreFeedbackVectorSlot(
          feedback_vector, slot_id,
          HeapConstant(FeedbackVector::MegamorphicSentinel(isolate())),
          SKIP_WRITE_BARRIER);
      ReportFeedbackUpdate(feedback_vector, slot_id,
                           "ConstructWithSpread:TransitionMegamorphic");
      Goto(&construct);
    }
  }

  BIND(&construct);
  Comment("call using ConstructWithSpread builtin");
  Callable callable = CodeFactory::InterpreterPushArgsThenConstruct(
      isolate(), InterpreterPushArgsMode::kWithFinalSpread);
  Node* code_target = HeapConstant(callable.code());
  return CallStub(callable.descriptor(), code_target, context,
                  args.reg_count(), args.base_reg_location(), target,
                  new_target, UndefinedConstant());
}
Node* InterpreterAssembler::CallRuntimeN(Node* function_id, Node* context,
                                         const RegListNodePair& args,
                                         int result_size) {
  DCHECK(Bytecodes::MakesCallAlongCriticalPath(bytecode_));
  DCHECK(Bytecodes::IsCallRuntime(bytecode_));
  Callable callable = CodeFactory::InterpreterCEntry(isolate(), result_size);
  Node* code_target = HeapConstant(callable.code());

  // Get the function entry from the function id.
  Node* function_table = ExternalConstant(
      ExternalReference::runtime_function_table_address(isolate()));
  Node* function_offset =
      Int32Mul(function_id, Int32Constant(sizeof(Runtime::Function)));
  Node* function =
      IntPtrAdd(function_table, ChangeUint32ToWord(function_offset));
  Node* function_entry =
      Load(MachineType::Pointer(), function,
           IntPtrConstant(offsetof(Runtime::Function, entry)));

  return CallStubR(callable.descriptor(), result_size, code_target, context,
                   args.reg_count(), args.base_reg_location(), function_entry);
}
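// The lookup above is plain array arithmetic over the C++ runtime function
// table: function_address = table + function_id * sizeof(Runtime::Function),
// after which the C entry point is loaded from
// offsetof(Runtime::Function, entry) within that record.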
void InterpreterAssembler::UpdateInterruptBudget(Node* weight, bool backward) {
  Comment("[ UpdateInterruptBudget");

  Node* budget_offset =
      IntPtrConstant(BytecodeArray::kInterruptBudgetOffset - kHeapObjectTag);

  // Assert that the weight is positive (negative weights should be implemented
  // as backward updates).
  CSA_ASSERT(this, Int32GreaterThanOrEqual(weight, Int32Constant(0)));

  // Update budget by |weight| and check if it reaches zero.
  Variable new_budget(this, MachineRepresentation::kWord32);
  Node* old_budget =
      Load(MachineType::Int32(), BytecodeArrayTaggedPointer(), budget_offset);
  // Make sure we include the current bytecode in the budget calculation.
  Node* budget_after_bytecode =
      Int32Sub(old_budget, Int32Constant(CurrentBytecodeSize()));

  if (backward) {
    new_budget.Bind(Int32Sub(budget_after_bytecode, weight));

    Node* condition =
        Int32GreaterThanOrEqual(new_budget.value(), Int32Constant(0));
    Label ok(this), interrupt_check(this, Label::kDeferred);
    Branch(condition, &ok, &interrupt_check);

    // Perform interrupt and reset budget.
    BIND(&interrupt_check);
    {
      CallRuntime(Runtime::kInterrupt, GetContext());
      new_budget.Bind(Int32Constant(Interpreter::InterruptBudget()));
      Goto(&ok);
    }

    BIND(&ok);
  } else {
    // For a forward jump, we know we only increase the interrupt budget, so
    // no need to check if it's below zero.
    new_budget.Bind(Int32Add(budget_after_bytecode, weight));
  }

  // Update budget.
  StoreNoWriteBarrier(MachineRepresentation::kWord32,
                      BytecodeArrayTaggedPointer(), budget_offset,
                      new_budget.value());
  Comment("] UpdateInterruptBudget");
}
Node* InterpreterAssembler::Advance() { return Advance(CurrentBytecodeSize()); }

Node* InterpreterAssembler::Advance(int delta) {
  return Advance(IntPtrConstant(delta));
}

Node* InterpreterAssembler::Advance(Node* delta, bool backward) {
#ifdef V8_TRACE_IGNITION
  TraceBytecode(Runtime::kInterpreterTraceBytecodeExit);
#endif
  Node* next_offset = backward ? IntPtrSub(BytecodeOffset(), delta)
                               : IntPtrAdd(BytecodeOffset(), delta);
  bytecode_offset_.Bind(next_offset);
  return next_offset;
}
Node* InterpreterAssembler::Jump(Node* delta, bool backward) {
  DCHECK(!Bytecodes::IsStarLookahead(bytecode_, operand_scale_));

  UpdateInterruptBudget(TruncateIntPtrToInt32(delta), backward);
  Node* new_bytecode_offset = Advance(delta, backward);
  Node* target_bytecode = LoadBytecode(new_bytecode_offset);
  return DispatchToBytecode(target_bytecode, new_bytecode_offset);
}

Node* InterpreterAssembler::Jump(Node* delta) { return Jump(delta, false); }

Node* InterpreterAssembler::JumpBackward(Node* delta) {
  return Jump(delta, true);
}
void InterpreterAssembler::JumpConditional(Node* condition, Node* delta) {
  Label match(this), no_match(this);

  Branch(condition, &match, &no_match);
  BIND(&match);
  Jump(delta);
  BIND(&no_match);
  Dispatch();
}

void InterpreterAssembler::JumpIfWordEqual(Node* lhs, Node* rhs, Node* delta) {
  JumpConditional(WordEqual(lhs, rhs), delta);
}

void InterpreterAssembler::JumpIfWordNotEqual(Node* lhs, Node* rhs,
                                              Node* delta) {
  JumpConditional(WordNotEqual(lhs, rhs), delta);
}
Node* InterpreterAssembler::LoadBytecode(Node* bytecode_offset) {
  Node* bytecode =
      Load(MachineType::Uint8(), BytecodeArrayTaggedPointer(), bytecode_offset);
  return ChangeUint32ToWord(bytecode);
}
Node* InterpreterAssembler::StarDispatchLookahead(Node* target_bytecode) {
  Label do_inline_star(this), done(this);

  Variable var_bytecode(this, MachineType::PointerRepresentation());
  var_bytecode.Bind(target_bytecode);

  Node* star_bytecode = IntPtrConstant(static_cast<int>(Bytecode::kStar));
  Node* is_star = WordEqual(target_bytecode, star_bytecode);
  Branch(is_star, &do_inline_star, &done);

  BIND(&do_inline_star);
  {
    InlineStar();
    var_bytecode.Bind(LoadBytecode(BytecodeOffset()));
    Goto(&done);
  }
  BIND(&done);
  return var_bytecode.value();
}
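// Rationale for the lookahead above (informal summary): many bytecodes are
// immediately followed by a Star that stores the accumulator into a register.
// Peeking at the next bytecode lets the current handler execute that Star
// inline and dispatch straight past it, saving a full dispatch cycle for this
// very common accumulator-store pattern.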
void InterpreterAssembler::InlineStar() {
  Bytecode previous_bytecode = bytecode_;
  AccumulatorUse previous_acc_use = accumulator_use_;

  bytecode_ = Bytecode::kStar;
  accumulator_use_ = AccumulatorUse::kNone;

#ifdef V8_TRACE_IGNITION
  TraceBytecode(Runtime::kInterpreterTraceBytecodeEntry);
#endif
  StoreRegister(GetAccumulator(),
                BytecodeOperandReg(0, LoadSensitivity::kSafe));

  DCHECK_EQ(accumulator_use_, Bytecodes::GetAccumulatorUse(bytecode_));

  Advance();
  bytecode_ = previous_bytecode;
  accumulator_use_ = previous_acc_use;
}
Node* InterpreterAssembler::Dispatch() {
  Comment("========= Dispatch");
  DCHECK_IMPLIES(Bytecodes::MakesCallAlongCriticalPath(bytecode_), made_call_);
  Node* target_offset = Advance();
  Node* target_bytecode = LoadBytecode(target_offset);

  if (Bytecodes::IsStarLookahead(bytecode_, operand_scale_)) {
    target_bytecode = StarDispatchLookahead(target_bytecode);
  }
  return DispatchToBytecode(target_bytecode, BytecodeOffset());
}
Node* InterpreterAssembler::DispatchToBytecode(Node* target_bytecode,
                                               Node* new_bytecode_offset) {
  if (FLAG_trace_ignition_dispatches) {
    TraceBytecodeDispatch(target_bytecode);
  }

  Node* target_code_entry =
      Load(MachineType::Pointer(), DispatchTableRawPointer(),
           TimesPointerSize(target_bytecode));

  return DispatchToBytecodeHandlerEntry(target_code_entry, new_bytecode_offset,
                                        target_bytecode);
}

Node* InterpreterAssembler::DispatchToBytecodeHandler(Node* handler,
                                                      Node* bytecode_offset,
                                                      Node* target_bytecode) {
  // TODO(ishell): Add CSA::CodeEntryPoint(code).
  Node* handler_entry =
      IntPtrAdd(BitcastTaggedToWord(handler),
                IntPtrConstant(Code::kHeaderSize - kHeapObjectTag));
  return DispatchToBytecodeHandlerEntry(handler_entry, bytecode_offset,
                                        target_bytecode);
}

Node* InterpreterAssembler::DispatchToBytecodeHandlerEntry(
    Node* handler_entry, Node* bytecode_offset, Node* target_bytecode) {
  // Propagate speculation poisoning.
  Node* poisoned_handler_entry = WordPoisonOnSpeculation(handler_entry);
  return TailCallBytecodeDispatch(
      InterpreterDispatchDescriptor{}, poisoned_handler_entry,
      GetAccumulatorUnchecked(), bytecode_offset, BytecodeArrayTaggedPointer(),
      DispatchTableRawPointer());
}
void InterpreterAssembler::DispatchWide(OperandScale operand_scale) {
  // Dispatching a wide bytecode requires treating the prefix
  // bytecode as a base pointer into the dispatch table and dispatching
  // the bytecode that follows relative to this base.
  //
  //   Indices 0-255 correspond to bytecodes with operand_scale == 0
  //   Indices 256-511 correspond to bytecodes with operand_scale == 1
  //   Indices 512-767 correspond to bytecodes with operand_scale == 2
  DCHECK_IMPLIES(Bytecodes::MakesCallAlongCriticalPath(bytecode_), made_call_);
  Node* next_bytecode_offset = Advance(1);
  Node* next_bytecode = LoadBytecode(next_bytecode_offset);

  if (FLAG_trace_ignition_dispatches) {
    TraceBytecodeDispatch(next_bytecode);
  }

  Node* base_index;
  switch (operand_scale) {
    case OperandScale::kDouble:
      base_index = IntPtrConstant(1 << kBitsPerByte);
      break;
    case OperandScale::kQuadruple:
      base_index = IntPtrConstant(2 << kBitsPerByte);
      break;
    default:
      UNREACHABLE();
  }
  Node* target_index = IntPtrAdd(base_index, next_bytecode);
  Node* target_code_entry =
      Load(MachineType::Pointer(), DispatchTableRawPointer(),
           TimesPointerSize(target_index));

  DispatchToBytecodeHandlerEntry(target_code_entry, next_bytecode_offset,
                                 next_bytecode);
}
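// Worked index arithmetic for the segmented table above (bytecode value
// invented for illustration): under kDouble scaling, a next bytecode with
// value 0x0B dispatches through index (1 << 8) + 0x0B = 267, i.e. the second
// 256-entry segment of the dispatch table.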
void InterpreterAssembler::UpdateInterruptBudgetOnReturn() {
  // Update the profiling count by the number of bytes between the end of the
  // current bytecode and the start of the first one, to simulate a backedge
  // to the start of the function. UpdateInterruptBudget already adds the size
  // of the current bytecode, so we only have to correct for the non-zero
  // offset of the first bytecode within the BytecodeArray.
  const int kFirstBytecodeOffset = BytecodeArray::kHeaderSize - kHeapObjectTag;
  Node* profiling_weight = Int32Sub(TruncateIntPtrToInt32(BytecodeOffset()),
                                    Int32Constant(kFirstBytecodeOffset));
  UpdateInterruptBudget(profiling_weight, true);
}

Node* InterpreterAssembler::LoadOSRNestingLevel() {
  return LoadObjectField(BytecodeArrayTaggedPointer(),
                         BytecodeArray::kOSRNestingLevelOffset,
                         MachineType::Int8());
}
void InterpreterAssembler::Abort(AbortReason abort_reason) {
  disable_stack_check_across_call_ = true;
  Node* abort_id = SmiConstant(abort_reason);
  CallRuntime(Runtime::kAbort, GetContext(), abort_id);
  disable_stack_check_across_call_ = false;
}

void InterpreterAssembler::AbortIfWordNotEqual(Node* lhs, Node* rhs,
                                               AbortReason abort_reason) {
  Label ok(this), abort(this, Label::kDeferred);
  Branch(WordEqual(lhs, rhs), &ok, &abort);

  BIND(&abort);
  Abort(abort_reason);
  Goto(&ok);

  BIND(&ok);
}
void InterpreterAssembler::MaybeDropFrames(Node* context) {
  Node* restart_fp_address =
      ExternalConstant(ExternalReference::debug_restart_fp_address(isolate()));

  Node* restart_fp = Load(MachineType::Pointer(), restart_fp_address);
  Node* null = IntPtrConstant(0);

  Label ok(this), drop_frames(this);
  Branch(IntPtrEqual(restart_fp, null), &ok, &drop_frames);

  BIND(&drop_frames);
  // We don't expect this call to return since the frame dropper tears down
  // the stack and jumps into the function on the target frame to restart it.
  CallStub(CodeFactory::FrameDropperTrampoline(isolate()), context, restart_fp);
  Abort(AbortReason::kUnexpectedReturnFromFrameDropper);
  Goto(&ok);

  BIND(&ok);
}
void InterpreterAssembler::TraceBytecode(Runtime::FunctionId function_id) {
  CallRuntime(function_id, GetContext(), BytecodeArrayTaggedPointer(),
              SmiTag(BytecodeOffset()), GetAccumulatorUnchecked());
}

void InterpreterAssembler::TraceBytecodeDispatch(Node* target_bytecode) {
  Node* counters_table = ExternalConstant(
      ExternalReference::interpreter_dispatch_counters(isolate()));
  Node* source_bytecode_table_index = IntPtrConstant(
      static_cast<int>(bytecode_) * (static_cast<int>(Bytecode::kLast) + 1));

  Node* counter_offset =
      TimesPointerSize(IntPtrAdd(source_bytecode_table_index, target_bytecode));
  Node* old_counter =
      Load(MachineType::IntPtr(), counters_table, counter_offset);

  Label counter_ok(this), counter_saturated(this, Label::kDeferred);

  Node* counter_reached_max = WordEqual(
      old_counter, IntPtrConstant(std::numeric_limits<uintptr_t>::max()));
  Branch(counter_reached_max, &counter_saturated, &counter_ok);

  BIND(&counter_ok);
  {
    Node* new_counter = IntPtrAdd(old_counter, IntPtrConstant(1));
    StoreNoWriteBarrier(MachineType::PointerRepresentation(), counters_table,
                        counter_offset, new_counter);
    Goto(&counter_saturated);
  }

  BIND(&counter_saturated);
}
// static
bool InterpreterAssembler::TargetSupportsUnalignedAccess() {
#if V8_TARGET_ARCH_MIPS || V8_TARGET_ARCH_MIPS64
  return false;
#elif V8_TARGET_ARCH_IA32 || V8_TARGET_ARCH_X64 || V8_TARGET_ARCH_S390 || \
    V8_TARGET_ARCH_ARM || V8_TARGET_ARCH_ARM64 || V8_TARGET_ARCH_PPC
  return true;
#else
#error "Unknown Architecture"
#endif
}

void InterpreterAssembler::AbortIfRegisterCountInvalid(
    Node* parameters_and_registers, Node* formal_parameter_count,
    Node* register_count) {
  Node* array_size =
      LoadAndUntagFixedArrayBaseLength(parameters_and_registers);

  Label ok(this), abort(this, Label::kDeferred);
  Branch(UintPtrLessThanOrEqual(
             IntPtrAdd(formal_parameter_count, register_count), array_size),
         &ok, &abort);

  BIND(&abort);
  Abort(AbortReason::kInvalidParametersAndRegistersInGenerator);
  Goto(&ok);

  BIND(&ok);
}
Node* InterpreterAssembler::ExportParametersAndRegisterFile(
    TNode<FixedArray> array, const RegListNodePair& registers,
    TNode<Int32T> formal_parameter_count) {
  // Store the formal parameters (without receiver) followed by the
  // registers into the generator's internal parameters_and_registers field.
  TNode<IntPtrT> formal_parameter_count_intptr =
      ChangeInt32ToIntPtr(formal_parameter_count);
  Node* register_count = ChangeUint32ToWord(registers.reg_count());
  if (FLAG_debug_code) {
    CSA_ASSERT(this, IntPtrEqual(registers.base_reg_location(),
                                 RegisterLocation(Register(0))));
    AbortIfRegisterCountInvalid(array, formal_parameter_count_intptr,
                                register_count);
  }

  {
    Variable var_index(this, MachineType::PointerRepresentation());
    var_index.Bind(IntPtrConstant(0));

    // Iterate over parameters and write them into the array.
    Label loop(this, &var_index), done_loop(this);

    Node* reg_base = IntPtrAdd(
        IntPtrConstant(Register::FromParameterIndex(0, 1).ToOperand() - 1),
        formal_parameter_count_intptr);

    Goto(&loop);
    BIND(&loop);
    {
      Node* index = var_index.value();
      GotoIfNot(UintPtrLessThan(index, formal_parameter_count_intptr),
                &done_loop);

      Node* reg_index = IntPtrSub(reg_base, index);
      Node* value = LoadRegister(reg_index);

      StoreFixedArrayElement(array, index, value);

      var_index.Bind(IntPtrAdd(index, IntPtrConstant(1)));
      Goto(&loop);
    }
    BIND(&done_loop);
  }

  {
    // Iterate over register file and write values into array.
    // The mapping of register to array index must match that used in
    // BytecodeGraphBuilder::VisitResumeGenerator.
    Variable var_index(this, MachineType::PointerRepresentation());
    var_index.Bind(IntPtrConstant(0));

    Label loop(this, &var_index), done_loop(this);
    Goto(&loop);
    BIND(&loop);
    {
      Node* index = var_index.value();
      GotoIfNot(UintPtrLessThan(index, register_count), &done_loop);

      Node* reg_index =
          IntPtrSub(IntPtrConstant(Register(0).ToOperand()), index);
      Node* value = LoadRegister(reg_index);

      Node* array_index = IntPtrAdd(formal_parameter_count_intptr, index);
      StoreFixedArrayElement(array, array_index, value);

      var_index.Bind(IntPtrAdd(index, IntPtrConstant(1)));
      Goto(&loop);
    }
    BIND(&done_loop);
  }

  return array;
}
Node* InterpreterAssembler::ImportRegisterFile(
    TNode<FixedArray> array, const RegListNodePair& registers,
    TNode<Int32T> formal_parameter_count) {
  TNode<IntPtrT> formal_parameter_count_intptr =
      ChangeInt32ToIntPtr(formal_parameter_count);
  TNode<UintPtrT> register_count = ChangeUint32ToWord(registers.reg_count());
  if (FLAG_debug_code) {
    CSA_ASSERT(this, IntPtrEqual(registers.base_reg_location(),
                                 RegisterLocation(Register(0))));
    AbortIfRegisterCountInvalid(array, formal_parameter_count_intptr,
                                register_count);
  }

  TVARIABLE(IntPtrT, var_index, IntPtrConstant(0));

  // Iterate over array and write values into register file. Also erase the
  // array contents to not keep them alive artificially.
  Label loop(this, &var_index), done_loop(this);
  Goto(&loop);
  BIND(&loop);
  {
    TNode<IntPtrT> index = var_index.value();
    GotoIfNot(UintPtrLessThan(index, register_count), &done_loop);

    TNode<IntPtrT> array_index =
        IntPtrAdd(formal_parameter_count_intptr, index);
    TNode<Object> value = LoadFixedArrayElement(array, array_index);

    TNode<IntPtrT> reg_index =
        IntPtrSub(IntPtrConstant(Register(0).ToOperand()), index);
    StoreRegister(value, reg_index);

    StoreFixedArrayElement(array, array_index,
                           LoadRoot(RootIndex::kStaleRegister));

    var_index = IntPtrAdd(index, IntPtrConstant(1));
    Goto(&loop);
  }
  BIND(&done_loop);

  return array;
}
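// Note: the register <-> array mapping used here mirrors
// ExportParametersAndRegisterFile above. Array slot
// formal_parameter_count + i corresponds to interpreter register
// Register(0).ToOperand() - i, so an export followed by an import is the
// identity on the register file, with the array left holding StaleRegister
// sentinels afterwards.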
int InterpreterAssembler::CurrentBytecodeSize() const {
  return Bytecodes::Size(bytecode_, operand_scale_);
}
void InterpreterAssembler::ToNumberOrNumeric(Object::Conversion mode) {
  Node* object = GetAccumulator();
  Node* context = GetContext();

  Variable var_type_feedback(this, MachineRepresentation::kTaggedSigned);
  Variable var_result(this, MachineRepresentation::kTagged);
  Label if_done(this), if_objectissmi(this), if_objectisheapnumber(this),
      if_objectisother(this, Label::kDeferred);

  GotoIf(TaggedIsSmi(object), &if_objectissmi);
  Branch(IsHeapNumber(object), &if_objectisheapnumber, &if_objectisother);

  BIND(&if_objectissmi);
  {
    var_result.Bind(object);
    var_type_feedback.Bind(SmiConstant(BinaryOperationFeedback::kSignedSmall));
    Goto(&if_done);
  }

  BIND(&if_objectisheapnumber);
  {
    var_result.Bind(object);
    var_type_feedback.Bind(SmiConstant(BinaryOperationFeedback::kNumber));
    Goto(&if_done);
  }

  BIND(&if_objectisother);
  {
    auto builtin = Builtins::kNonNumberToNumber;
    if (mode == Object::Conversion::kToNumeric) {
      builtin = Builtins::kNonNumberToNumeric;
      // Special case for collecting BigInt feedback.
      Label not_bigint(this);
      GotoIfNot(IsBigInt(object), &not_bigint);
      {
        var_result.Bind(object);
        var_type_feedback.Bind(SmiConstant(BinaryOperationFeedback::kBigInt));
        Goto(&if_done);
      }
      BIND(&not_bigint);
    }

    // Convert {object} by calling out to the appropriate builtin.
    var_result.Bind(CallBuiltin(builtin, context, object));
    var_type_feedback.Bind(SmiConstant(BinaryOperationFeedback::kAny));
    Goto(&if_done);
  }

  BIND(&if_done);

  // Record the type feedback collected for {object}.
  Node* slot_index = BytecodeOperandIdx(0);
  Node* maybe_feedback_vector = LoadFeedbackVectorUnchecked();

  UpdateFeedback(var_type_feedback.value(), maybe_feedback_vector, slot_index);

  SetAccumulator(var_result.value());
  Dispatch();
}

}  // namespace interpreter
}  // namespace internal
}  // namespace v8