5 #include "src/compiler/backend/instruction-selector.h" 9 #include "src/assembler-inl.h" 10 #include "src/base/adapters.h" 11 #include "src/compiler/backend/instruction-selector-impl.h" 12 #include "src/compiler/compiler-source-position-table.h" 13 #include "src/compiler/node-matchers.h" 14 #include "src/compiler/pipeline.h" 15 #include "src/compiler/schedule.h" 16 #include "src/compiler/state-values-utils.h" 17 #include "src/deoptimizer.h" 23 InstructionSelector::InstructionSelector(
24 Zone* zone,
size_t node_count, Linkage* linkage,
25 InstructionSequence* sequence, Schedule* schedule,
26 SourcePositionTable* source_positions, Frame* frame,
27 EnableSwitchJumpTable enable_switch_jump_table,
28 SourcePositionMode source_position_mode, Features features,
29 EnableScheduling enable_scheduling,
30 EnableRootsRelativeAddressing enable_roots_relative_addressing,
31 PoisoningMitigationLevel poisoning_level, EnableTraceTurboJson trace_turbo)
35 source_positions_(source_positions),
36 source_position_mode_(source_position_mode),
39 current_block_(nullptr),
41 continuation_inputs_(sequence->zone()),
42 continuation_outputs_(sequence->zone()),
43 defined_(node_count, false, zone),
44 used_(node_count, false, zone),
45 effect_level_(node_count, 0, zone),
46 virtual_registers_(node_count,
47 InstructionOperand::kInvalidVirtualRegister, zone),
48 virtual_register_rename_(zone),
50 enable_scheduling_(enable_scheduling),
51 enable_roots_relative_addressing_(enable_roots_relative_addressing),
52 enable_switch_jump_table_(enable_switch_jump_table),
53 poisoning_level_(poisoning_level),
55 instruction_selection_failed_(false),
56 instr_origins_(sequence->zone()),
57 trace_turbo_(trace_turbo) {
58 instructions_.reserve(node_count);
59 continuation_inputs_.reserve(5);
60 continuation_outputs_.reserve(2);
62 if (trace_turbo_ == kEnableTraceTurboJson) {
63 instr_origins_.assign(node_count, {-1, 0});
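// Selection entry point: marks loop-header phi inputs as used, visits every
// basic block in reverse RPO order, and then replays the selected
// instructions block by block (optionally through the instruction scheduler).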
bool InstructionSelector::SelectInstructions() {
  // Mark the inputs of all phis in loop headers as used.
  BasicBlockVector* blocks = schedule()->rpo_order();
  for (auto const block : *blocks) {
    if (!block->IsLoopHeader()) continue;
    DCHECK_LE(2u, block->PredecessorCount());
    for (Node* const phi : *block) {
      if (phi->opcode() != IrOpcode::kPhi) continue;

      // Mark all inputs as used.
      for (Node* const input : phi->inputs()) {
        MarkAsUsed(input);
      }
    }
  }

  // Visit each basic block in post order and generate code for it.
  for (auto i = blocks->rbegin(); i != blocks->rend(); ++i) {
    VisitBlock(*i);
    if (instruction_selection_failed()) return false;
  }

  // Schedule the selected instructions.
  if (UseInstructionScheduling()) {
    scheduler_ = new (zone()) InstructionScheduler(zone(), sequence());
  }

  for (auto const block : *blocks) {
    InstructionBlock* instruction_block =
        sequence()->InstructionBlockAt(RpoNumber::FromInt(block->rpo_number()));
    for (size_t i = 0; i < instruction_block->phis().size(); i++) {
      UpdateRenamesInPhi(instruction_block->PhiAt(i));
    }
    size_t end = instruction_block->code_end();
    size_t start = instruction_block->code_start();
    DCHECK_LE(end, start);
    StartBlock(RpoNumber::FromInt(block->rpo_number()));
    if (end != start) {
      while (start-- > end + 1) {
        UpdateRenames(instructions_[start]);
        AddInstruction(instructions_[start]);
      }
      UpdateRenames(instructions_[end]);
      AddTerminator(instructions_[end]);
    }
    EndBlock(RpoNumber::FromInt(block->rpo_number()));
  }

  sequence()->ValidateSSA();
  return true;
}
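// The block and instruction helpers below route either through the
// instruction scheduler (when scheduling is enabled) or directly into the
// instruction sequence.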
void InstructionSelector::StartBlock(RpoNumber rpo) {
  if (UseInstructionScheduling()) {
    DCHECK_NOT_NULL(scheduler_);
    scheduler_->StartBlock(rpo);
  } else {
    sequence()->StartBlock(rpo);
  }
}

void InstructionSelector::EndBlock(RpoNumber rpo) {
  if (UseInstructionScheduling()) {
    DCHECK_NOT_NULL(scheduler_);
    scheduler_->EndBlock(rpo);
  } else {
    sequence()->EndBlock(rpo);
  }
}

void InstructionSelector::AddTerminator(Instruction* instr) {
  if (UseInstructionScheduling()) {
    DCHECK_NOT_NULL(scheduler_);
    scheduler_->AddTerminator(instr);
  } else {
    sequence()->AddInstruction(instr);
  }
}

void InstructionSelector::AddInstruction(Instruction* instr) {
  if (UseInstructionScheduling()) {
    DCHECK_NOT_NULL(scheduler_);
    scheduler_->AddInstruction(instr);
  } else {
    sequence()->AddInstruction(instr);
  }
}
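// Convenience Emit() overloads: each one packs its fixed operands into an
// array and forwards to the general Emit() further below.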
Instruction* InstructionSelector::Emit(InstructionCode opcode,
                                       InstructionOperand output,
                                       size_t temp_count,
                                       InstructionOperand* temps) {
  size_t output_count = output.IsInvalid() ? 0 : 1;
  return Emit(opcode, output_count, &output, 0, nullptr, temp_count, temps);
}

Instruction* InstructionSelector::Emit(InstructionCode opcode,
                                       InstructionOperand output,
                                       InstructionOperand a, size_t temp_count,
                                       InstructionOperand* temps) {
  size_t output_count = output.IsInvalid() ? 0 : 1;
  return Emit(opcode, output_count, &output, 1, &a, temp_count, temps);
}

Instruction* InstructionSelector::Emit(InstructionCode opcode,
                                       InstructionOperand output,
                                       InstructionOperand a,
                                       InstructionOperand b, size_t temp_count,
                                       InstructionOperand* temps) {
  size_t output_count = output.IsInvalid() ? 0 : 1;
  InstructionOperand inputs[] = {a, b};
  size_t input_count = arraysize(inputs);
  return Emit(opcode, output_count, &output, input_count, inputs, temp_count,
              temps);
}

Instruction* InstructionSelector::Emit(InstructionCode opcode,
                                       InstructionOperand output,
                                       InstructionOperand a,
                                       InstructionOperand b,
                                       InstructionOperand c, size_t temp_count,
                                       InstructionOperand* temps) {
  size_t output_count = output.IsInvalid() ? 0 : 1;
  InstructionOperand inputs[] = {a, b, c};
  size_t input_count = arraysize(inputs);
  return Emit(opcode, output_count, &output, input_count, inputs, temp_count,
              temps);
}

Instruction* InstructionSelector::Emit(
    InstructionCode opcode, InstructionOperand output, InstructionOperand a,
    InstructionOperand b, InstructionOperand c, InstructionOperand d,
    size_t temp_count, InstructionOperand* temps) {
  size_t output_count = output.IsInvalid() ? 0 : 1;
  InstructionOperand inputs[] = {a, b, c, d};
  size_t input_count = arraysize(inputs);
  return Emit(opcode, output_count, &output, input_count, inputs, temp_count,
              temps);
}

Instruction* InstructionSelector::Emit(
    InstructionCode opcode, InstructionOperand output, InstructionOperand a,
    InstructionOperand b, InstructionOperand c, InstructionOperand d,
    InstructionOperand e, size_t temp_count, InstructionOperand* temps) {
  size_t output_count = output.IsInvalid() ? 0 : 1;
  InstructionOperand inputs[] = {a, b, c, d, e};
  size_t input_count = arraysize(inputs);
  return Emit(opcode, output_count, &output, input_count, inputs, temp_count,
              temps);
}

Instruction* InstructionSelector::Emit(
    InstructionCode opcode, InstructionOperand output, InstructionOperand a,
    InstructionOperand b, InstructionOperand c, InstructionOperand d,
    InstructionOperand e, InstructionOperand f, size_t temp_count,
    InstructionOperand* temps) {
  size_t output_count = output.IsInvalid() ? 0 : 1;
  InstructionOperand inputs[] = {a, b, c, d, e, f};
  size_t input_count = arraysize(inputs);
  return Emit(opcode, output_count, &output, input_count, inputs, temp_count,
              temps);
}

Instruction* InstructionSelector::Emit(
    InstructionCode opcode, size_t output_count, InstructionOperand* outputs,
    size_t input_count, InstructionOperand* inputs, size_t temp_count,
    InstructionOperand* temps) {
  if (output_count >= Instruction::kMaxOutputCount ||
      input_count >= Instruction::kMaxInputCount ||
      temp_count >= Instruction::kMaxTempCount) {
    set_instruction_selection_failed();
    return nullptr;
  }

  Instruction* instr =
      Instruction::New(instruction_zone(), opcode, output_count, outputs,
                       input_count, inputs, temp_count, temps);
  return Emit(instr);
}

Instruction* InstructionSelector::Emit(Instruction* instr) {
  instructions_.push_back(instr);
  return instr;
}
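// Returns true if {node} can be covered while generating code for {user},
// i.e. it can be folded into the user's instruction instead of being emitted
// on its own.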
bool InstructionSelector::CanCover(Node* user, Node* node) const {
  // 1. Both {user} and {node} must be in the same basic block.
  if (schedule()->block(node) != schedule()->block(user)) {
    return false;
  }
  // 2. Pure {node}s must be owned by the {user}.
  if (node->op()->HasProperty(Operator::kPure)) {
    return node->OwnedBy(user);
  }
  // 3. Impure {node}s must match the effect level of the {user}.
  if (GetEffectLevel(node) != GetEffectLevel(user)) {
    return false;
  }
  // 4. Only {node} must have value edges pointing to {user}.
  for (Edge const edge : node->use_edges()) {
    if (edge.from() != user && NodeProperties::IsValueEdge(edge)) {
      return false;
    }
  }
  return true;
}

bool InstructionSelector::CanCoverTransitively(Node* user, Node* node,
                                               Node* node_input) const {
  if (CanCover(user, node) && CanCover(node, node_input)) {
    // If {node} is pure, transitivity might not hold.
    if (node->op()->HasProperty(Operator::kPure)) {
      // If {node_input} is pure, the effect levels do not matter.
      if (node_input->op()->HasProperty(Operator::kPure)) return true;
      // Otherwise, {user} and {node_input} must have the same effect level.
      return GetEffectLevel(user) == GetEffectLevel(node_input);
    }
    return true;
  }
  return false;
}

bool InstructionSelector::IsOnlyUserOfNodeInSameBlock(Node* user,
                                                      Node* node) const {
  BasicBlock* bb_user = schedule()->block(user);
  BasicBlock* bb_node = schedule()->block(node);
  if (bb_user != bb_node) return false;
  for (Edge const edge : node->use_edges()) {
    Node* from = edge.from();
    if ((from != user) && (schedule()->block(from) == bb_user)) {
      return false;
    }
  }
  return true;
}
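// Virtual-register renaming support: renames recorded via SetRename() are
// resolved through GetRename() and applied to instruction inputs and phi
// operands before the instructions are replayed into the sequence.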
void InstructionSelector::UpdateRenames(Instruction* instruction) {
  for (size_t i = 0; i < instruction->InputCount(); i++) {
    TryRename(instruction->InputAt(i));
  }
}

void InstructionSelector::UpdateRenamesInPhi(PhiInstruction* phi) {
  for (size_t i = 0; i < phi->operands().size(); i++) {
    int vreg = phi->operands()[i];
    int renamed = GetRename(vreg);
    if (vreg != renamed) {
      phi->RenameInput(i, renamed);
    }
  }
}

int InstructionSelector::GetRename(int virtual_register) {
  int rename = virtual_register;
  while (true) {
    if (static_cast<size_t>(rename) >= virtual_register_rename_.size()) break;
    int next = virtual_register_rename_[rename];
    if (next == InstructionOperand::kInvalidVirtualRegister) {
      break;
    }
    rename = next;
  }
  return rename;
}

void InstructionSelector::TryRename(InstructionOperand* op) {
  if (!op->IsUnallocated()) return;
  UnallocatedOperand* unalloc = UnallocatedOperand::cast(op);
  int vreg = unalloc->virtual_register();
  int rename = GetRename(vreg);
  if (rename != vreg) {
    *unalloc = UnallocatedOperand(*unalloc, rename);
  }
}

void InstructionSelector::SetRename(const Node* node, const Node* rename) {
  int vreg = GetVirtualRegister(node);
  if (static_cast<size_t>(vreg) >= virtual_register_rename_.size()) {
    int invalid = InstructionOperand::kInvalidVirtualRegister;
    virtual_register_rename_.resize(vreg + 1, invalid);
  }
  virtual_register_rename_[vreg] = GetVirtualRegister(rename);
}

int InstructionSelector::GetVirtualRegister(const Node* node) {
  DCHECK_NOT_NULL(node);
  size_t const id = node->id();
  DCHECK_LT(id, virtual_registers_.size());
  int virtual_register = virtual_registers_[id];
  if (virtual_register == InstructionOperand::kInvalidVirtualRegister) {
    virtual_register = sequence()->NextVirtualRegister();
    virtual_registers_[id] = virtual_register;
  }
  return virtual_register;
}
const std::map<NodeId, int> InstructionSelector::GetVirtualRegistersForTesting()
    const {
  std::map<NodeId, int> virtual_registers;
  for (size_t n = 0; n < virtual_registers_.size(); ++n) {
    if (virtual_registers_[n] != InstructionOperand::kInvalidVirtualRegister) {
      NodeId const id = static_cast<NodeId>(n);
      virtual_registers.insert(std::make_pair(id, virtual_registers_[n]));
    }
  }
  return virtual_registers;
}
bool InstructionSelector::IsDefined(Node* node) const {
  DCHECK_NOT_NULL(node);
  size_t const id = node->id();
  DCHECK_LT(id, defined_.size());
  return defined_[id];
}

void InstructionSelector::MarkAsDefined(Node* node) {
  DCHECK_NOT_NULL(node);
  size_t const id = node->id();
  DCHECK_LT(id, defined_.size());
  defined_[id] = true;
}

bool InstructionSelector::IsUsed(Node* node) const {
  DCHECK_NOT_NULL(node);
  // Retain nodes must always be emitted, and non-eliminatable nodes count as
  // used regardless of the used_ bitmap.
  if (node->opcode() == IrOpcode::kRetain) return true;
  if (!node->op()->HasProperty(Operator::kEliminatable)) return true;
  size_t const id = node->id();
  DCHECK_LT(id, used_.size());
  return used_[id];
}

void InstructionSelector::MarkAsUsed(Node* node) {
  DCHECK_NOT_NULL(node);
  size_t const id = node->id();
  DCHECK_LT(id, used_.size());
  used_[id] = true;
}

int InstructionSelector::GetEffectLevel(Node* node) const {
  DCHECK_NOT_NULL(node);
  size_t const id = node->id();
  DCHECK_LT(id, effect_level_.size());
  return effect_level_[id];
}

void InstructionSelector::SetEffectLevel(Node* node, int effect_level) {
  DCHECK_NOT_NULL(node);
  size_t const id = node->id();
  DCHECK_LT(id, effect_level_.size());
  effect_level_[id] = effect_level;
}

bool InstructionSelector::CanAddressRelativeToRootsRegister() const {
  return enable_roots_relative_addressing_ == kEnableRootsRelativeAddressing &&
         CanUseRootsRegister();
}

bool InstructionSelector::CanUseRootsRegister() const {
  return linkage()->GetIncomingDescriptor()->flags() &
         CallDescriptor::kCanUseRoots;
}

void InstructionSelector::MarkAsRepresentation(MachineRepresentation rep,
                                               const InstructionOperand& op) {
  UnallocatedOperand unalloc = UnallocatedOperand::cast(op);
  sequence()->MarkAsRepresentation(rep, unalloc.virtual_register());
}

void InstructionSelector::MarkAsRepresentation(MachineRepresentation rep,
                                               Node* node) {
  sequence()->MarkAsRepresentation(rep, GetVirtualRegister(node));
}
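// Helper that turns one frame-state input node into an instruction operand
// for deoptimization: constants become immediates, everything else becomes a
// unique slot or an any-location use, depending on {kind}.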
InstructionOperand OperandForDeopt(Isolate* isolate, OperandGenerator* g,
                                   Node* input, FrameStateInputKind kind,
                                   MachineRepresentation rep) {
  if (rep == MachineRepresentation::kNone) {
    return g->TempImmediate(FrameStateDescriptor::kImpossibleValue);
  }

  switch (input->opcode()) {
    case IrOpcode::kInt32Constant:
    case IrOpcode::kInt64Constant:
    case IrOpcode::kNumberConstant:
    case IrOpcode::kFloat32Constant:
    case IrOpcode::kFloat64Constant:
    case IrOpcode::kDelayedStringConstant:
      return g->UseImmediate(input);
    case IrOpcode::kHeapConstant: {
      if (!CanBeTaggedPointer(rep)) {
        // Inconsistent static and dynamic types can produce a heap object
        // here whose representation says it is a Smi; return an invalid
        // operand so the value is treated as optimized out.
        return InstructionOperand();
      }

      Handle<HeapObject> constant = HeapConstantOf(input->op());
      RootIndex root_index;
      if (isolate->roots_table().IsRootHandle(constant, &root_index) &&
          root_index == RootIndex::kOptimizedOut) {
        // An optimized-out object also yields an invalid operand, so the
        // fast path for optimized-out values is taken.
        return InstructionOperand();
      }

      return g->UseImmediate(input);
    }
    case IrOpcode::kArgumentsElementsState:
    case IrOpcode::kArgumentsLengthState:
    case IrOpcode::kObjectState:
    case IrOpcode::kTypedObjectState:
      UNREACHABLE();
    default:
      switch (kind) {
        case FrameStateInputKind::kStackSlot:
          return g->UseUniqueSlot(input);
        case FrameStateInputKind::kAny:
          // Deopts "wrap" other operations, so the deopt's inputs are
          // potentially needed until the end of the deoptimising code.
          return g->UseAnyAtEnd(input);
      }
  }
  UNREACHABLE();
}
class StateObjectDeduplicator {
 public:
  explicit StateObjectDeduplicator(Zone* zone) : objects_(zone) {}
  static const size_t kNotDuplicated = SIZE_MAX;

  size_t GetObjectId(Node* node) {
    DCHECK(node->opcode() == IrOpcode::kTypedObjectState ||
           node->opcode() == IrOpcode::kObjectId ||
           node->opcode() == IrOpcode::kArgumentsElementsState);
    for (size_t i = 0; i < objects_.size(); ++i) {
      if (objects_[i] == node) return i;
      // ObjectId nodes express objects with the same identity in the deopt
      // info, so map them back to previously seen TypedObjectState nodes.
      if (HasObjectId(objects_[i]) && HasObjectId(node) &&
          ObjectIdOf(objects_[i]->op()) == ObjectIdOf(node->op())) {
        return i;
      }
    }
    DCHECK(node->opcode() == IrOpcode::kTypedObjectState ||
           node->opcode() == IrOpcode::kArgumentsElementsState);
    return kNotDuplicated;
  }

  size_t InsertObject(Node* node) {
    DCHECK(node->opcode() == IrOpcode::kTypedObjectState ||
           node->opcode() == IrOpcode::kObjectId ||
           node->opcode() == IrOpcode::kArgumentsElementsState);
    size_t id = objects_.size();
    objects_.push_back(node);
    return id;
  }

 private:
  static bool HasObjectId(Node* node) {
    return node->opcode() == IrOpcode::kTypedObjectState ||
           node->opcode() == IrOpcode::kObjectId;
  }

  ZoneVector<Node*> objects_;
};
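// Recursively translates one frame-state value into state-value descriptor
// entries plus (possibly) an instruction operand; returns how many operands
// were appended to {inputs}.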
size_t InstructionSelector::AddOperandToStateValueDescriptor(
    StateValueList* values, InstructionOperandVector* inputs,
    OperandGenerator* g, StateObjectDeduplicator* deduplicator, Node* input,
    MachineType type, FrameStateInputKind kind, Zone* zone) {
  if (input == nullptr) {
    values->PushOptimizedOut();
    return 0;
  }

  switch (input->opcode()) {
    case IrOpcode::kArgumentsElementsState: {
      values->PushArgumentsElements(ArgumentsStateTypeOf(input->op()));
      // The elements backing store of an arguments object participates in the
      // duplicate object counting, but can itself never appear duplicated.
      DCHECK_EQ(StateObjectDeduplicator::kNotDuplicated,
                deduplicator->GetObjectId(input));
      deduplicator->InsertObject(input);
      return 0;
    }
    case IrOpcode::kArgumentsLengthState: {
      values->PushArgumentsLength(ArgumentsStateTypeOf(input->op()));
      return 0;
    }
    case IrOpcode::kObjectState: {
      UNREACHABLE();
    }
    case IrOpcode::kTypedObjectState:
    case IrOpcode::kObjectId: {
      size_t id = deduplicator->GetObjectId(input);
      if (id == StateObjectDeduplicator::kNotDuplicated) {
        DCHECK_EQ(IrOpcode::kTypedObjectState, input->opcode());
        size_t entries = 0;
        id = deduplicator->InsertObject(input);
        StateValueList* nested = values->PushRecursiveField(zone, id);
        int const input_count = input->op()->ValueInputCount();
        ZoneVector<MachineType> const* types = MachineTypesOf(input->op());
        for (int i = 0; i < input_count; ++i) {
          entries += AddOperandToStateValueDescriptor(
              nested, inputs, g, deduplicator, input->InputAt(i), types->at(i),
              kind, zone);
        }
        return entries;
      } else {
        // A duplicate object is counted again for the running id, so the
        // input is pushed once more.
        deduplicator->InsertObject(input);
        values->PushDuplicate(id);
        return 0;
      }
    }
    default: {
      InstructionOperand op =
          OperandForDeopt(isolate(), g, input, kind, type.representation());
      if (op.kind() == InstructionOperand::INVALID) {
        // Invalid operand means the value is impossible or optimized-out.
        values->PushOptimizedOut();
        return 0;
      } else {
        inputs->push_back(op);
        values->PushPlain(type);
        return 1;
      }
    }
  }
}
size_t InstructionSelector::AddInputsToFrameStateDescriptor(
    FrameStateDescriptor* descriptor, Node* state, OperandGenerator* g,
    StateObjectDeduplicator* deduplicator, InstructionOperandVector* inputs,
    FrameStateInputKind kind, Zone* zone) {
  DCHECK_EQ(IrOpcode::kFrameState, state->op()->opcode());

  size_t entries = 0;
  size_t initial_size = inputs->size();
  USE(initial_size);  // Only used for the DCHECK below.

  if (descriptor->outer_state()) {
    entries += AddInputsToFrameStateDescriptor(
        descriptor->outer_state(), state->InputAt(kFrameStateOuterStateInput),
        g, deduplicator, inputs, kind, zone);
  }

  Node* parameters = state->InputAt(kFrameStateParametersInput);
  Node* locals = state->InputAt(kFrameStateLocalsInput);
  Node* stack = state->InputAt(kFrameStateStackInput);
  Node* context = state->InputAt(kFrameStateContextInput);
  Node* function = state->InputAt(kFrameStateFunctionInput);

  DCHECK_EQ(descriptor->parameters_count(),
            StateValuesAccess(parameters).size());
  DCHECK_EQ(descriptor->locals_count(), StateValuesAccess(locals).size());
  DCHECK_EQ(descriptor->stack_count(), StateValuesAccess(stack).size());

  StateValueList* values_descriptor = descriptor->GetStateValueDescriptors();

  DCHECK_EQ(values_descriptor->size(), 0u);
  values_descriptor->ReserveSize(descriptor->GetSize());

  entries += AddOperandToStateValueDescriptor(
      values_descriptor, inputs, g, deduplicator, function,
      MachineType::AnyTagged(), FrameStateInputKind::kStackSlot, zone);
  for (StateValuesAccess::TypedNode input_node :
       StateValuesAccess(parameters)) {
    entries += AddOperandToStateValueDescriptor(values_descriptor, inputs, g,
                                                deduplicator, input_node.node,
                                                input_node.type, kind, zone);
  }
  if (descriptor->HasContext()) {
    entries += AddOperandToStateValueDescriptor(
        values_descriptor, inputs, g, deduplicator, context,
        MachineType::AnyTagged(), FrameStateInputKind::kStackSlot, zone);
  }
  for (StateValuesAccess::TypedNode input_node : StateValuesAccess(locals)) {
    entries += AddOperandToStateValueDescriptor(values_descriptor, inputs, g,
                                                deduplicator, input_node.node,
                                                input_node.type, kind, zone);
  }
  for (StateValuesAccess::TypedNode input_node : StateValuesAccess(stack)) {
    entries += AddOperandToStateValueDescriptor(values_descriptor, inputs, g,
                                                deduplicator, input_node.node,
                                                input_node.type, kind, zone);
  }
  DCHECK_EQ(initial_size + entries, inputs->size());
  return entries;
}
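// EmitWithContinuation() encodes the FlagsContinuation (branch, deoptimize,
// set, trap or none) into the opcode and appends the continuation-specific
// inputs and outputs before emitting the instruction.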
Instruction* InstructionSelector::EmitWithContinuation(
    InstructionCode opcode, FlagsContinuation* cont) {
  return EmitWithContinuation(opcode, 0, nullptr, 0, nullptr, cont);
}

Instruction* InstructionSelector::EmitWithContinuation(
    InstructionCode opcode, InstructionOperand a, FlagsContinuation* cont) {
  return EmitWithContinuation(opcode, 0, nullptr, 1, &a, cont);
}

Instruction* InstructionSelector::EmitWithContinuation(
    InstructionCode opcode, InstructionOperand a, InstructionOperand b,
    FlagsContinuation* cont) {
  InstructionOperand inputs[] = {a, b};
  return EmitWithContinuation(opcode, 0, nullptr, arraysize(inputs), inputs,
                              cont);
}

Instruction* InstructionSelector::EmitWithContinuation(
    InstructionCode opcode, InstructionOperand a, InstructionOperand b,
    InstructionOperand c, FlagsContinuation* cont) {
  InstructionOperand inputs[] = {a, b, c};
  return EmitWithContinuation(opcode, 0, nullptr, arraysize(inputs), inputs,
                              cont);
}

Instruction* InstructionSelector::EmitWithContinuation(
    InstructionCode opcode, size_t output_count, InstructionOperand* outputs,
    size_t input_count, InstructionOperand* inputs, FlagsContinuation* cont) {
  OperandGenerator g(this);

  opcode = cont->Encode(opcode);

  continuation_inputs_.resize(0);
  for (size_t i = 0; i < input_count; i++) {
    continuation_inputs_.push_back(inputs[i]);
  }

  continuation_outputs_.resize(0);
  for (size_t i = 0; i < output_count; i++) {
    continuation_outputs_.push_back(outputs[i]);
  }

  if (cont->IsBranch()) {
    continuation_inputs_.push_back(g.Label(cont->true_block()));
    continuation_inputs_.push_back(g.Label(cont->false_block()));
  } else if (cont->IsDeoptimize()) {
    opcode |= MiscField::encode(static_cast<int>(input_count));
    AppendDeoptimizeArguments(&continuation_inputs_, cont->kind(),
                              cont->reason(), cont->feedback(),
                              cont->frame_state());
  } else if (cont->IsSet()) {
    continuation_outputs_.push_back(g.DefineAsRegister(cont->result()));
  } else if (cont->IsTrap()) {
    int trap_id = static_cast<int>(cont->trap_id());
    continuation_inputs_.push_back(g.UseImmediate(trap_id));
  } else {
    DCHECK(cont->IsNone());
  }

  size_t const emit_inputs_size = continuation_inputs_.size();
  auto* emit_inputs =
      emit_inputs_size ? &continuation_inputs_.front() : nullptr;
  size_t const emit_outputs_size = continuation_outputs_.size();
  auto* emit_outputs =
      emit_outputs_size ? &continuation_outputs_.front() : nullptr;
  return Emit(opcode, emit_outputs_size, emit_outputs, emit_inputs_size,
              emit_inputs, 0, nullptr);
}
void InstructionSelector::AppendDeoptimizeArguments(
    InstructionOperandVector* args, DeoptimizeKind kind,
    DeoptimizeReason reason, VectorSlotPair const& feedback,
    Node* frame_state) {
  OperandGenerator g(this);
  FrameStateDescriptor* const descriptor = GetFrameStateDescriptor(frame_state);
  DCHECK_NE(DeoptimizeKind::kLazy, kind);
  int const state_id =
      sequence()->AddDeoptimizationEntry(descriptor, kind, reason, feedback);
  args->push_back(g.TempImmediate(state_id));
  StateObjectDeduplicator deduplicator(instruction_zone());
  AddInputsToFrameStateDescriptor(descriptor, frame_state, &g, &deduplicator,
                                  args, FrameStateInputKind::kAny,
                                  instruction_zone());
}
Instruction* InstructionSelector::EmitDeoptimize(
    InstructionCode opcode, size_t output_count, InstructionOperand* outputs,
    size_t input_count, InstructionOperand* inputs, DeoptimizeKind kind,
    DeoptimizeReason reason, VectorSlotPair const& feedback,
    Node* frame_state) {
  InstructionOperandVector args(instruction_zone());
  for (size_t i = 0; i < input_count; ++i) {
    args.push_back(inputs[i]);
  }
  opcode |= MiscField::encode(static_cast<int>(input_count));
  AppendDeoptimizeArguments(&args, kind, reason, feedback, frame_state);
  return Emit(opcode, output_count, outputs, args.size(), &args.front(), 0,
              nullptr);
}
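// CallBuffer collects the output, input and frame-state operands of a call
// so that the platform-specific call visitors can emit it.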
struct CallBuffer {
  CallBuffer(Zone* zone, const CallDescriptor* call_descriptor,
             FrameStateDescriptor* frame_state)
      : descriptor(call_descriptor),
        frame_state_descriptor(frame_state),
        output_nodes(zone),
        outputs(zone),
        instruction_args(zone),
        pushed_nodes(zone) {
    output_nodes.reserve(call_descriptor->ReturnCount());
    outputs.reserve(call_descriptor->ReturnCount());
    pushed_nodes.reserve(input_count());
    instruction_args.reserve(input_count() + frame_state_value_count());
  }

  const CallDescriptor* descriptor;
  FrameStateDescriptor* frame_state_descriptor;
  ZoneVector<PushParameter> output_nodes;
  InstructionOperandVector outputs;
  InstructionOperandVector instruction_args;
  ZoneVector<PushParameter> pushed_nodes;

  size_t input_count() const { return descriptor->InputCount(); }

  size_t frame_state_count() const { return descriptor->FrameStateCount(); }

  size_t frame_state_value_count() const {
    return (frame_state_descriptor == nullptr)
               ? 0
               : (frame_state_descriptor->GetTotalSize() +
                  1);  // Include deopt id.
  }
};
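// Fills a CallBuffer for {call}: collects return-value projections, the
// callee operand, an optional lazy-deopt frame state, and the remaining
// arguments split between pushed nodes and instruction operands.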
void InstructionSelector::InitializeCallBuffer(Node* call, CallBuffer* buffer,
                                               CallBufferFlags flags,
                                               bool is_tail_call,
                                               int stack_param_delta) {
  OperandGenerator g(this);
  size_t ret_count = buffer->descriptor->ReturnCount();
  DCHECK_LE(call->op()->ValueOutputCount(), ret_count);
  DCHECK_EQ(
      call->op()->ValueInputCount(),
      static_cast<int>(buffer->input_count() + buffer->frame_state_count()));

  // Collect the projections that represent multiple outputs from this call.
  if (ret_count == 1) {
    PushParameter result = {call, buffer->descriptor->GetReturnLocation(0)};
    buffer->output_nodes.push_back(result);
  } else {
    buffer->output_nodes.resize(ret_count);
    int stack_count = 0;
    for (size_t i = 0; i < ret_count; ++i) {
      LinkageLocation location = buffer->descriptor->GetReturnLocation(i);
      buffer->output_nodes[i] = PushParameter(nullptr, location);
      if (location.IsCallerFrameSlot()) {
        stack_count += location.GetSizeInPointers();
      }
    }
    for (Edge const edge : call->use_edges()) {
      if (!NodeProperties::IsValueEdge(edge)) continue;
      Node* node = edge.from();
      DCHECK_EQ(IrOpcode::kProjection, node->opcode());
      size_t const index = ProjectionIndexOf(node->op());

      DCHECK_LT(index, buffer->output_nodes.size());
      DCHECK(!buffer->output_nodes[index].node);
      buffer->output_nodes[index].node = node;
    }
    frame_->EnsureReturnSlots(stack_count);
  }

  // Filter out the outputs that aren't live because no projection uses them.
  size_t outputs_needed_by_framestate =
      buffer->frame_state_descriptor == nullptr
          ? 0
          : buffer->frame_state_descriptor->state_combine()
                .ConsumedOutputCount();
  for (size_t i = 0; i < buffer->output_nodes.size(); i++) {
    bool output_is_live = buffer->output_nodes[i].node != nullptr ||
                          i < outputs_needed_by_framestate;
    if (output_is_live) {
      LinkageLocation location = buffer->output_nodes[i].location;
      MachineRepresentation rep = location.GetType().representation();

      Node* output = buffer->output_nodes[i].node;
      InstructionOperand op = output == nullptr
                                  ? g.TempLocation(location)
                                  : g.DefineAsLocation(output, location);
      MarkAsRepresentation(rep, op);

      if (!UnallocatedOperand::cast(op).HasFixedSlotPolicy()) {
        buffer->outputs.push_back(op);
        buffer->output_nodes[i].node = nullptr;
      }
    }
  }
  // The first argument is always the callee code.
  Node* callee = call->InputAt(0);
  bool call_code_immediate = (flags & kCallCodeImmediate) != 0;
  bool call_address_immediate = (flags & kCallAddressImmediate) != 0;
  bool call_use_fixed_target_reg = (flags & kCallFixedTargetRegister) != 0;
  bool call_through_slot = (flags & kAllowCallThroughSlot) != 0;
  switch (buffer->descriptor->kind()) {
    case CallDescriptor::kCallCodeObject:
      buffer->instruction_args.push_back(
          (call_code_immediate && callee->opcode() == IrOpcode::kHeapConstant)
              ? g.UseImmediate(callee)
              : call_use_fixed_target_reg
                    ? g.UseFixed(callee, kJavaScriptCallCodeStartRegister)
                    : is_tail_call ? g.UseUniqueRegister(callee)
                                   : call_through_slot ? g.UseUniqueSlot(callee)
                                                       : g.UseRegister(callee));
      break;
    case CallDescriptor::kCallAddress:
      buffer->instruction_args.push_back(
          (call_address_immediate &&
           callee->opcode() == IrOpcode::kExternalConstant)
              ? g.UseImmediate(callee)
              : call_use_fixed_target_reg
                    ? g.UseFixed(callee, kJavaScriptCallCodeStartRegister)
                    : g.UseRegister(callee));
      break;
    case CallDescriptor::kCallWasmFunction:
    case CallDescriptor::kCallWasmImportWrapper:
      buffer->instruction_args.push_back(
          (call_address_immediate &&
           (callee->opcode() == IrOpcode::kRelocatableInt64Constant ||
            callee->opcode() == IrOpcode::kRelocatableInt32Constant))
              ? g.UseImmediate(callee)
              : call_use_fixed_target_reg
                    ? g.UseFixed(callee, kJavaScriptCallCodeStartRegister)
                    : g.UseRegister(callee));
      break;
    case CallDescriptor::kCallJSFunction:
      buffer->instruction_args.push_back(
          g.UseLocation(callee, buffer->descriptor->GetInputLocation(0)));
      break;
  }
  DCHECK_EQ(1u, buffer->instruction_args.size());

  // Argument 1 holds the poison-alias index: the index of the operand that
  // aliases with the poison register, or -1 if there is no aliasing.
  buffer->instruction_args.push_back(g.TempImmediate(-1));
  const size_t poison_alias_index = 1;
  DCHECK_EQ(buffer->instruction_args.size() - 1, poison_alias_index);

  // If the call needs a frame state, insert the deoptimization id and the
  // value inputs of the frame state next.
  size_t frame_state_entries = 0;
  USE(frame_state_entries);
  if (buffer->frame_state_descriptor != nullptr) {
    Node* frame_state =
        call->InputAt(static_cast<int>(buffer->descriptor->InputCount()));

    // For a syntactic tail call, drop the current frame and all arguments
    // adaptor frames on top of it.
    if (is_tail_call) {
      frame_state = NodeProperties::GetFrameStateInput(frame_state);
      buffer->frame_state_descriptor =
          buffer->frame_state_descriptor->outer_state();
      while (buffer->frame_state_descriptor != nullptr &&
             buffer->frame_state_descriptor->type() ==
                 FrameStateType::kArgumentsAdaptor) {
        frame_state = NodeProperties::GetFrameStateInput(frame_state);
        buffer->frame_state_descriptor =
            buffer->frame_state_descriptor->outer_state();
      }
    }

    int const state_id = sequence()->AddDeoptimizationEntry(
        buffer->frame_state_descriptor, DeoptimizeKind::kLazy,
        DeoptimizeReason::kUnknown, VectorSlotPair());
    buffer->instruction_args.push_back(g.TempImmediate(state_id));

    StateObjectDeduplicator deduplicator(instruction_zone());

    frame_state_entries =
        1 + AddInputsToFrameStateDescriptor(
                buffer->frame_state_descriptor, frame_state, &g, &deduplicator,
                &buffer->instruction_args, FrameStateInputKind::kStackSlot,
                instruction_zone());

    DCHECK_EQ(2 + frame_state_entries, buffer->instruction_args.size());
  }

  size_t input_count = static_cast<size_t>(buffer->input_count());

  // Split the arguments into pushed_nodes and instruction_args. Pushed
  // arguments require an explicit push instruction before the call and do not
  // appear as arguments to the call; everything else becomes an
  // InstructionOperand argument.
  auto iter(call->inputs().begin());
  size_t pushed_count = 0;
  bool call_tail = (flags & kCallTail) != 0;
  for (size_t index = 0; index < input_count; ++iter, ++index) {
    DCHECK(iter != call->inputs().end());
    DCHECK_NE(IrOpcode::kFrameState, (*iter)->op()->opcode());
    if (index == 0) continue;  // The callee is handled above.

    LinkageLocation location = buffer->descriptor->GetInputLocation(index);
    if (call_tail) {
      location = LinkageLocation::ConvertToTailCallerLocation(
          location, stack_param_delta);
    }
    InstructionOperand op = g.UseLocation(*iter, location);
    UnallocatedOperand unallocated = UnallocatedOperand::cast(op);
    if (unallocated.HasFixedSlotPolicy() && !call_tail) {
      int stack_index = -unallocated.fixed_slot_index() - 1;
      if (static_cast<size_t>(stack_index) >= buffer->pushed_nodes.size()) {
        buffer->pushed_nodes.resize(stack_index + 1);
      }
      PushParameter param = {*iter, location};
      buffer->pushed_nodes[stack_index] = param;
      pushed_count++;
    } else {
      // With load poisoning, an argument passed in the poison register is
      // requested in a memory location instead; code generation later moves
      // it into the register.
      if (poisoning_level_ != PoisoningMitigationLevel::kDontPoison &&
          unallocated.HasFixedRegisterPolicy()) {
        int reg = unallocated.fixed_register_index();
        if (Register::from_code(reg) == kSpeculationPoisonRegister) {
          buffer->instruction_args[poison_alias_index] = g.TempImmediate(
              static_cast<int32_t>(buffer->instruction_args.size()));
          op = g.UseRegisterOrSlotOrConstant(*iter);
        }
      }
      buffer->instruction_args.push_back(op);
    }
  }
  DCHECK_EQ(input_count, buffer->instruction_args.size() + pushed_count -
                             frame_state_entries - 1);
  if (V8_TARGET_ARCH_STORES_RETURN_ADDRESS_ON_STACK && call_tail &&
      stack_param_delta != 0) {
    // For tail calls that change the size of their parameter list, move the
    // saved return address to just above the parameters.
    LinkageLocation saved_return_location =
        LinkageLocation::ForSavedCallerReturnAddress();
    InstructionOperand return_address =
        g.UsePointerLocation(LinkageLocation::ConvertToTailCallerLocation(
                                 saved_return_location, stack_param_delta),
                             saved_return_location);
    buffer->instruction_args.push_back(return_address);
  }
}
bool InstructionSelector::IsSourcePositionUsed(Node* node) {
  return (source_position_mode_ == kAllSourcePositions ||
          node->opcode() == IrOpcode::kCall ||
          node->opcode() == IrOpcode::kCallWithCallerSavedRegisters ||
          node->opcode() == IrOpcode::kTrapIf ||
          node->opcode() == IrOpcode::kTrapUnless ||
          node->opcode() == IrOpcode::kProtectedLoad ||
          node->opcode() == IrOpcode::kProtectedStore);
}
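// Generates instructions for one basic block: effect levels are assigned
// front-to-back, then the control node and the block's nodes are visited in
// reverse order so that instruction selection can cover multiple nodes.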
void InstructionSelector::VisitBlock(BasicBlock* block) {
  DCHECK(!current_block_);
  current_block_ = block;
  auto current_num_instructions = [&] {
    DCHECK_GE(kMaxInt, instructions_.size());
    return static_cast<int>(instructions_.size());
  };
  int current_block_end = current_num_instructions();

  int effect_level = 0;
  for (Node* const node : *block) {
    SetEffectLevel(node, effect_level);
    if (node->opcode() == IrOpcode::kStore ||
        node->opcode() == IrOpcode::kUnalignedStore ||
        node->opcode() == IrOpcode::kCall ||
        node->opcode() == IrOpcode::kCallWithCallerSavedRegisters ||
        node->opcode() == IrOpcode::kProtectedLoad ||
        node->opcode() == IrOpcode::kProtectedStore) {
      ++effect_level;
    }
  }

  // The control input is visited first, so it shares the effect level of the
  // last node in the block.
  if (block->control_input() != nullptr) {
    SetEffectLevel(block->control_input(), effect_level);
  }

  auto FinishEmittedInstructions = [&](Node* node, int instruction_start) {
    if (instruction_selection_failed()) return false;
    if (current_num_instructions() == instruction_start) return true;
    std::reverse(instructions_.begin() + instruction_start,
                 instructions_.end());
    if (!node) return true;
    if (!source_positions_) return true;
    SourcePosition source_position = source_positions_->GetSourcePosition(node);
    if (source_position.IsKnown() && IsSourcePositionUsed(node)) {
      sequence()->SetSourcePosition(instructions_[instruction_start],
                                    source_position);
    }
    return true;
  };

  // Generate code for the block control "top down", but schedule the code
  // "bottom up".
  VisitControl(block);
  if (!FinishEmittedInstructions(block->control_input(), current_block_end))
    return;

  // Visit code in reverse control flow order, because architecture-specific
  // matching may cover more than one node at a time.
  for (auto node : base::Reversed(*block)) {
    int current_node_end = current_num_instructions();
    // Skip nodes that are unused or already defined.
    if (IsUsed(node) && !IsDefined(node)) {
      VisitNode(node);
      if (!FinishEmittedInstructions(node, current_node_end)) return;
    }
    if (trace_turbo_ == kEnableTraceTurboJson) {
      instr_origins_[node->id()] = {current_num_instructions(),
                                    current_node_end};
    }
  }

  // We're done with the block.
  InstructionBlock* instruction_block =
      sequence()->InstructionBlockAt(RpoNumber::FromInt(block->rpo_number()));
  if (current_num_instructions() == current_block_end) {
    // Avoid an empty block by inserting a nop.
    Emit(Instruction::New(sequence()->zone(), kArchNop));
  }
  instruction_block->set_code_start(current_num_instructions());
  instruction_block->set_code_end(current_block_end);
  current_block_ = nullptr;
}
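// Dispatches on the block's control flow (goto, call, branch, switch, return,
// deoptimize, throw) and emits the corresponding control instruction.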
void InstructionSelector::VisitControl(BasicBlock* block) {
  // SSA deconstruction requires targets of branches not to have phis.
  if (block->SuccessorCount() > 1) {
    for (BasicBlock* const successor : block->successors()) {
      for (Node* const node : *successor) {
        if (IrOpcode::IsPhiOpcode(node->opcode())) {
          std::ostringstream str;
          str << "You might have specified merged variables for a label with "
              << "only one predecessor." << std::endl
              << "# Current Block: " << *successor << std::endl
              << "# Node: " << *node;
          FATAL("%s", str.str().c_str());
        }
      }
    }
  }

  Node* input = block->control_input();
  int instruction_end = static_cast<int>(instructions_.size());
  switch (block->control()) {
    case BasicBlock::kGoto:
      VisitGoto(block->SuccessorAt(0));
      break;
    case BasicBlock::kCall: {
      DCHECK_EQ(IrOpcode::kCall, input->opcode());
      BasicBlock* success = block->SuccessorAt(0);
      BasicBlock* exception = block->SuccessorAt(1);
      VisitCall(input, exception);
      VisitGoto(success);
      break;
    }
    case BasicBlock::kTailCall: {
      DCHECK_EQ(IrOpcode::kTailCall, input->opcode());
      VisitTailCall(input);
      break;
    }
    case BasicBlock::kBranch: {
      DCHECK_EQ(IrOpcode::kBranch, input->opcode());
      BasicBlock* tbranch = block->SuccessorAt(0);
      BasicBlock* fbranch = block->SuccessorAt(1);
      if (tbranch == fbranch) {
        VisitGoto(tbranch);
      } else {
        VisitBranch(input, tbranch, fbranch);
      }
      break;
    }
    case BasicBlock::kSwitch: {
      DCHECK_EQ(IrOpcode::kSwitch, input->opcode());
      // The last successor must be {IfDefault}.
      BasicBlock* default_branch = block->successors().back();
      DCHECK_EQ(IrOpcode::kIfDefault, default_branch->front()->opcode());
      // All other successors must be {IfValue}s.
      int32_t min_value = std::numeric_limits<int32_t>::max();
      int32_t max_value = std::numeric_limits<int32_t>::min();
      size_t case_count = block->SuccessorCount() - 1;
      ZoneVector<CaseInfo> cases(case_count, zone());
      for (size_t i = 0; i < case_count; ++i) {
        BasicBlock* branch = block->SuccessorAt(i);
        const IfValueParameters& p = IfValueParametersOf(branch->front()->op());
        cases[i] = CaseInfo{p.value(), p.comparison_order(), branch};
        if (min_value > p.value()) min_value = p.value();
        if (max_value < p.value()) max_value = p.value();
      }
      SwitchInfo sw(cases, min_value, max_value, default_branch);
      VisitSwitch(input, sw);
      break;
    }
    case BasicBlock::kReturn: {
      DCHECK_EQ(IrOpcode::kReturn, input->opcode());
      VisitReturn(input);
      break;
    }
    case BasicBlock::kDeoptimize: {
      DeoptimizeParameters p = DeoptimizeParametersOf(input->op());
      Node* value = input->InputAt(0);
      VisitDeoptimize(p.kind(), p.reason(), p.feedback(), value);
      break;
    }
    case BasicBlock::kThrow:
      DCHECK_EQ(IrOpcode::kThrow, input->opcode());
      VisitThrow(input);
      break;
    case BasicBlock::kNone: {
      // The exit block doesn't have control.
      DCHECK_NULL(input);
      break;
    }
  }
  if (trace_turbo_ == kEnableTraceTurboJson && input) {
    int instruction_start = static_cast<int>(instructions_.size());
    instr_origins_[input->id()] = {instruction_start, instruction_end};
  }
}
void InstructionSelector::MarkPairProjectionsAsWord32(Node* node) {
  Node* projection0 = NodeProperties::FindProjection(node, 0);
  if (projection0) {
    MarkAsWord32(projection0);
  }
  Node* projection1 = NodeProperties::FindProjection(node, 1);
  if (projection1) {
    MarkAsWord32(projection1);
  }
}
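// Main dispatch: marks the result representation of {node} and forwards to
// the matching Visit* routine for its opcode.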
void InstructionSelector::VisitNode(Node* node) {
  DCHECK_NOT_NULL(schedule()->block(node));
  switch (node->opcode()) {
    case IrOpcode::kStart:
    case IrOpcode::kLoop:
    case IrOpcode::kEnd:
    case IrOpcode::kBranch:
    case IrOpcode::kIfTrue:
    case IrOpcode::kIfFalse:
    case IrOpcode::kIfSuccess:
    case IrOpcode::kSwitch:
    case IrOpcode::kIfValue:
    case IrOpcode::kIfDefault:
    case IrOpcode::kEffectPhi:
    case IrOpcode::kMerge:
    case IrOpcode::kTerminate:
    case IrOpcode::kBeginRegion:
      // No code needed for these graph artifacts.
      return;
    case IrOpcode::kIfException:
      return MarkAsReference(node), VisitIfException(node);
    case IrOpcode::kFinishRegion:
      return MarkAsReference(node), VisitFinishRegion(node);
    case IrOpcode::kParameter: {
      MachineType type =
          linkage()->GetParameterType(ParameterIndexOf(node->op()));
      MarkAsRepresentation(type.representation(), node);
      return VisitParameter(node);
    }
    case IrOpcode::kOsrValue:
      return MarkAsReference(node), VisitOsrValue(node);
    case IrOpcode::kPhi: {
      MachineRepresentation rep = PhiRepresentationOf(node->op());
      if (rep == MachineRepresentation::kNone) return;
      MarkAsRepresentation(rep, node);
      return VisitPhi(node);
    }
    case IrOpcode::kProjection:
      return VisitProjection(node);
    case IrOpcode::kInt32Constant:
    case IrOpcode::kInt64Constant:
    case IrOpcode::kExternalConstant:
    case IrOpcode::kRelocatableInt32Constant:
    case IrOpcode::kRelocatableInt64Constant:
      return VisitConstant(node);
    case IrOpcode::kFloat32Constant:
      return MarkAsFloat32(node), VisitConstant(node);
    case IrOpcode::kFloat64Constant:
      return MarkAsFloat64(node), VisitConstant(node);
    case IrOpcode::kHeapConstant:
      return MarkAsReference(node), VisitConstant(node);
    case IrOpcode::kNumberConstant: {
      double value = OpParameter<double>(node->op());
      if (!IsSmiDouble(value)) MarkAsReference(node);
      return VisitConstant(node);
    }
    case IrOpcode::kDelayedStringConstant:
      return MarkAsReference(node), VisitConstant(node);
    case IrOpcode::kCall:
      return VisitCall(node);
    case IrOpcode::kCallWithCallerSavedRegisters:
      return VisitCallWithCallerSavedRegisters(node);
    case IrOpcode::kDeoptimizeIf:
      return VisitDeoptimizeIf(node);
    case IrOpcode::kDeoptimizeUnless:
      return VisitDeoptimizeUnless(node);
    case IrOpcode::kTrapIf:
      return VisitTrapIf(node, TrapIdOf(node->op()));
    case IrOpcode::kTrapUnless:
      return VisitTrapUnless(node, TrapIdOf(node->op()));
    case IrOpcode::kFrameState:
    case IrOpcode::kStateValues:
    case IrOpcode::kObjectState:
      return;
    case IrOpcode::kDebugAbort:
      VisitDebugAbort(node);
      return;
    case IrOpcode::kDebugBreak:
      VisitDebugBreak(node);
      return;
    case IrOpcode::kUnreachable:
      VisitUnreachable(node);
      return;
    case IrOpcode::kDeadValue:
      VisitDeadValue(node);
      return;
    case IrOpcode::kComment:
      VisitComment(node);
      return;
    case IrOpcode::kRetain:
      VisitRetain(node);
      return;
    case IrOpcode::kLoad: {
      LoadRepresentation type = LoadRepresentationOf(node->op());
      MarkAsRepresentation(type.representation(), node);
      return VisitLoad(node);
    }
    case IrOpcode::kPoisonedLoad: {
      LoadRepresentation type = LoadRepresentationOf(node->op());
      MarkAsRepresentation(type.representation(), node);
      return VisitPoisonedLoad(node);
    }
    case IrOpcode::kStore:
      return VisitStore(node);
    case IrOpcode::kProtectedStore:
      return VisitProtectedStore(node);
    case IrOpcode::kWord32And:
      return MarkAsWord32(node), VisitWord32And(node);
    case IrOpcode::kWord32Or:
      return MarkAsWord32(node), VisitWord32Or(node);
    case IrOpcode::kWord32Xor:
      return MarkAsWord32(node), VisitWord32Xor(node);
    case IrOpcode::kWord32Shl:
      return MarkAsWord32(node), VisitWord32Shl(node);
    case IrOpcode::kWord32Shr:
      return MarkAsWord32(node), VisitWord32Shr(node);
    case IrOpcode::kWord32Sar:
      return MarkAsWord32(node), VisitWord32Sar(node);
    case IrOpcode::kWord32Ror:
      return MarkAsWord32(node), VisitWord32Ror(node);
    case IrOpcode::kWord32Equal:
      return VisitWord32Equal(node);
    case IrOpcode::kWord32Clz:
      return MarkAsWord32(node), VisitWord32Clz(node);
    case IrOpcode::kWord32Ctz:
      return MarkAsWord32(node), VisitWord32Ctz(node);
    case IrOpcode::kWord32ReverseBits:
      return MarkAsWord32(node), VisitWord32ReverseBits(node);
    case IrOpcode::kWord32ReverseBytes:
      return MarkAsWord32(node), VisitWord32ReverseBytes(node);
    case IrOpcode::kInt32AbsWithOverflow:
      return MarkAsWord32(node), VisitInt32AbsWithOverflow(node);
    case IrOpcode::kWord32Popcnt:
      return MarkAsWord32(node), VisitWord32Popcnt(node);
    case IrOpcode::kWord64Popcnt:
      return MarkAsWord32(node), VisitWord64Popcnt(node);
    case IrOpcode::kWord64And:
      return MarkAsWord64(node), VisitWord64And(node);
    case IrOpcode::kWord64Or:
      return MarkAsWord64(node), VisitWord64Or(node);
    case IrOpcode::kWord64Xor:
      return MarkAsWord64(node), VisitWord64Xor(node);
    case IrOpcode::kWord64Shl:
      return MarkAsWord64(node), VisitWord64Shl(node);
    case IrOpcode::kWord64Shr:
      return MarkAsWord64(node), VisitWord64Shr(node);
    case IrOpcode::kWord64Sar:
      return MarkAsWord64(node), VisitWord64Sar(node);
    case IrOpcode::kWord64Ror:
      return MarkAsWord64(node), VisitWord64Ror(node);
    case IrOpcode::kWord64Clz:
      return MarkAsWord64(node), VisitWord64Clz(node);
    case IrOpcode::kWord64Ctz:
      return MarkAsWord64(node), VisitWord64Ctz(node);
    case IrOpcode::kWord64ReverseBits:
      return MarkAsWord64(node), VisitWord64ReverseBits(node);
    case IrOpcode::kWord64ReverseBytes:
      return MarkAsWord64(node), VisitWord64ReverseBytes(node);
    case IrOpcode::kInt64AbsWithOverflow:
      return MarkAsWord64(node), VisitInt64AbsWithOverflow(node);
    case IrOpcode::kWord64Equal:
      return VisitWord64Equal(node);
    case IrOpcode::kInt32Add:
      return MarkAsWord32(node), VisitInt32Add(node);
    case IrOpcode::kInt32AddWithOverflow:
      return MarkAsWord32(node), VisitInt32AddWithOverflow(node);
    case IrOpcode::kInt32Sub:
      return MarkAsWord32(node), VisitInt32Sub(node);
    case IrOpcode::kInt32SubWithOverflow:
      return VisitInt32SubWithOverflow(node);
    case IrOpcode::kInt32Mul:
      return MarkAsWord32(node), VisitInt32Mul(node);
    case IrOpcode::kInt32MulWithOverflow:
      return MarkAsWord32(node), VisitInt32MulWithOverflow(node);
    case IrOpcode::kInt32MulHigh:
      return VisitInt32MulHigh(node);
    case IrOpcode::kInt32Div:
      return MarkAsWord32(node), VisitInt32Div(node);
    case IrOpcode::kInt32Mod:
      return MarkAsWord32(node), VisitInt32Mod(node);
    case IrOpcode::kInt32LessThan:
      return VisitInt32LessThan(node);
    case IrOpcode::kInt32LessThanOrEqual:
      return VisitInt32LessThanOrEqual(node);
    case IrOpcode::kUint32Div:
      return MarkAsWord32(node), VisitUint32Div(node);
    case IrOpcode::kUint32LessThan:
      return VisitUint32LessThan(node);
    case IrOpcode::kUint32LessThanOrEqual:
      return VisitUint32LessThanOrEqual(node);
    case IrOpcode::kUint32Mod:
      return MarkAsWord32(node), VisitUint32Mod(node);
    case IrOpcode::kUint32MulHigh:
      return VisitUint32MulHigh(node);
    case IrOpcode::kInt64Add:
      return MarkAsWord64(node), VisitInt64Add(node);
    case IrOpcode::kInt64AddWithOverflow:
      return MarkAsWord64(node), VisitInt64AddWithOverflow(node);
    case IrOpcode::kInt64Sub:
      return MarkAsWord64(node), VisitInt64Sub(node);
    case IrOpcode::kInt64SubWithOverflow:
      return MarkAsWord64(node), VisitInt64SubWithOverflow(node);
    case IrOpcode::kInt64Mul:
      return MarkAsWord64(node), VisitInt64Mul(node);
    case IrOpcode::kInt64Div:
      return MarkAsWord64(node), VisitInt64Div(node);
    case IrOpcode::kInt64Mod:
      return MarkAsWord64(node), VisitInt64Mod(node);
    case IrOpcode::kInt64LessThan:
      return VisitInt64LessThan(node);
    case IrOpcode::kInt64LessThanOrEqual:
      return VisitInt64LessThanOrEqual(node);
    case IrOpcode::kUint64Div:
      return MarkAsWord64(node), VisitUint64Div(node);
    case IrOpcode::kUint64LessThan:
      return VisitUint64LessThan(node);
    case IrOpcode::kUint64LessThanOrEqual:
      return VisitUint64LessThanOrEqual(node);
    case IrOpcode::kUint64Mod:
      return MarkAsWord64(node), VisitUint64Mod(node);
    case IrOpcode::kBitcastTaggedToWord:
      return MarkAsRepresentation(MachineType::PointerRepresentation(), node),
             VisitBitcastTaggedToWord(node);
    case IrOpcode::kBitcastWordToTagged:
      return MarkAsReference(node), VisitBitcastWordToTagged(node);
    case IrOpcode::kBitcastWordToTaggedSigned:
      return MarkAsRepresentation(MachineRepresentation::kTaggedSigned, node),
             VisitBitcastWordToTaggedSigned(node);
    case IrOpcode::kChangeFloat32ToFloat64:
      return MarkAsFloat64(node), VisitChangeFloat32ToFloat64(node);
    case IrOpcode::kChangeInt32ToFloat64:
      return MarkAsFloat64(node), VisitChangeInt32ToFloat64(node);
    case IrOpcode::kChangeInt64ToFloat64:
      return MarkAsFloat64(node), VisitChangeInt64ToFloat64(node);
    case IrOpcode::kChangeUint32ToFloat64:
      return MarkAsFloat64(node), VisitChangeUint32ToFloat64(node);
    case IrOpcode::kChangeFloat64ToInt32:
      return MarkAsWord32(node), VisitChangeFloat64ToInt32(node);
    case IrOpcode::kChangeFloat64ToInt64:
      return MarkAsWord64(node), VisitChangeFloat64ToInt64(node);
    case IrOpcode::kChangeFloat64ToUint32:
      return MarkAsWord32(node), VisitChangeFloat64ToUint32(node);
    case IrOpcode::kChangeFloat64ToUint64:
      return MarkAsWord64(node), VisitChangeFloat64ToUint64(node);
    case IrOpcode::kFloat64SilenceNaN:
      MarkAsFloat64(node);
      if (CanProduceSignalingNaN(node->InputAt(0))) {
        return VisitFloat64SilenceNaN(node);
      } else {
        return EmitIdentity(node);
      }
    case IrOpcode::kTruncateFloat64ToInt64:
      return MarkAsWord64(node), VisitTruncateFloat64ToInt64(node);
    case IrOpcode::kTruncateFloat64ToUint32:
      return MarkAsWord32(node), VisitTruncateFloat64ToUint32(node);
    case IrOpcode::kTruncateFloat32ToInt32:
      return MarkAsWord32(node), VisitTruncateFloat32ToInt32(node);
    case IrOpcode::kTruncateFloat32ToUint32:
      return MarkAsWord32(node), VisitTruncateFloat32ToUint32(node);
    case IrOpcode::kTryTruncateFloat32ToInt64:
      return MarkAsWord64(node), VisitTryTruncateFloat32ToInt64(node);
    case IrOpcode::kTryTruncateFloat64ToInt64:
      return MarkAsWord64(node), VisitTryTruncateFloat64ToInt64(node);
    case IrOpcode::kTryTruncateFloat32ToUint64:
      return MarkAsWord64(node), VisitTryTruncateFloat32ToUint64(node);
    case IrOpcode::kTryTruncateFloat64ToUint64:
      return MarkAsWord64(node), VisitTryTruncateFloat64ToUint64(node);
    case IrOpcode::kChangeInt32ToInt64:
      return MarkAsWord64(node), VisitChangeInt32ToInt64(node);
    case IrOpcode::kChangeUint32ToUint64:
      return MarkAsWord64(node), VisitChangeUint32ToUint64(node);
    case IrOpcode::kTruncateFloat64ToFloat32:
      return MarkAsFloat32(node), VisitTruncateFloat64ToFloat32(node);
    case IrOpcode::kTruncateFloat64ToWord32:
      return MarkAsWord32(node), VisitTruncateFloat64ToWord32(node);
    case IrOpcode::kTruncateInt64ToInt32:
      return MarkAsWord32(node), VisitTruncateInt64ToInt32(node);
    case IrOpcode::kRoundFloat64ToInt32:
      return MarkAsWord32(node), VisitRoundFloat64ToInt32(node);
    case IrOpcode::kRoundInt64ToFloat32:
      return MarkAsFloat32(node), VisitRoundInt64ToFloat32(node);
    case IrOpcode::kRoundInt32ToFloat32:
      return MarkAsFloat32(node), VisitRoundInt32ToFloat32(node);
    case IrOpcode::kRoundInt64ToFloat64:
      return MarkAsFloat64(node), VisitRoundInt64ToFloat64(node);
    case IrOpcode::kBitcastFloat32ToInt32:
      return MarkAsWord32(node), VisitBitcastFloat32ToInt32(node);
    case IrOpcode::kRoundUint32ToFloat32:
      return MarkAsFloat32(node), VisitRoundUint32ToFloat32(node);
    case IrOpcode::kRoundUint64ToFloat32:
      return MarkAsFloat64(node), VisitRoundUint64ToFloat32(node);
    case IrOpcode::kRoundUint64ToFloat64:
      return MarkAsFloat64(node), VisitRoundUint64ToFloat64(node);
    case IrOpcode::kBitcastFloat64ToInt64:
      return MarkAsWord64(node), VisitBitcastFloat64ToInt64(node);
    case IrOpcode::kBitcastInt32ToFloat32:
      return MarkAsFloat32(node), VisitBitcastInt32ToFloat32(node);
    case IrOpcode::kBitcastInt64ToFloat64:
      return MarkAsFloat64(node), VisitBitcastInt64ToFloat64(node);
    case IrOpcode::kFloat32Add:
      return MarkAsFloat32(node), VisitFloat32Add(node);
    case IrOpcode::kFloat32Sub:
      return MarkAsFloat32(node), VisitFloat32Sub(node);
    case IrOpcode::kFloat32Neg:
      return MarkAsFloat32(node), VisitFloat32Neg(node);
    case IrOpcode::kFloat32Mul:
      return MarkAsFloat32(node), VisitFloat32Mul(node);
    case IrOpcode::kFloat32Div:
      return MarkAsFloat32(node), VisitFloat32Div(node);
    case IrOpcode::kFloat32Abs:
      return MarkAsFloat32(node), VisitFloat32Abs(node);
    case IrOpcode::kFloat32Sqrt:
      return MarkAsFloat32(node), VisitFloat32Sqrt(node);
    case IrOpcode::kFloat32Equal:
      return VisitFloat32Equal(node);
    case IrOpcode::kFloat32LessThan:
      return VisitFloat32LessThan(node);
    case IrOpcode::kFloat32LessThanOrEqual:
      return VisitFloat32LessThanOrEqual(node);
    case IrOpcode::kFloat32Max:
      return MarkAsFloat32(node), VisitFloat32Max(node);
    case IrOpcode::kFloat32Min:
      return MarkAsFloat32(node), VisitFloat32Min(node);
    case IrOpcode::kFloat64Add:
      return MarkAsFloat64(node), VisitFloat64Add(node);
    case IrOpcode::kFloat64Sub:
      return MarkAsFloat64(node), VisitFloat64Sub(node);
    case IrOpcode::kFloat64Neg:
      return MarkAsFloat64(node), VisitFloat64Neg(node);
    case IrOpcode::kFloat64Mul:
      return MarkAsFloat64(node), VisitFloat64Mul(node);
    case IrOpcode::kFloat64Div:
      return MarkAsFloat64(node), VisitFloat64Div(node);
    case IrOpcode::kFloat64Mod:
      return MarkAsFloat64(node), VisitFloat64Mod(node);
    case IrOpcode::kFloat64Min:
      return MarkAsFloat64(node), VisitFloat64Min(node);
    case IrOpcode::kFloat64Max:
      return MarkAsFloat64(node), VisitFloat64Max(node);
    case IrOpcode::kFloat64Abs:
      return MarkAsFloat64(node), VisitFloat64Abs(node);
    case IrOpcode::kFloat64Acos:
      return MarkAsFloat64(node), VisitFloat64Acos(node);
    case IrOpcode::kFloat64Acosh:
      return MarkAsFloat64(node), VisitFloat64Acosh(node);
    case IrOpcode::kFloat64Asin:
      return MarkAsFloat64(node), VisitFloat64Asin(node);
    case IrOpcode::kFloat64Asinh:
      return MarkAsFloat64(node), VisitFloat64Asinh(node);
    case IrOpcode::kFloat64Atan:
      return MarkAsFloat64(node), VisitFloat64Atan(node);
    case IrOpcode::kFloat64Atanh:
      return MarkAsFloat64(node), VisitFloat64Atanh(node);
    case IrOpcode::kFloat64Atan2:
      return MarkAsFloat64(node), VisitFloat64Atan2(node);
    case IrOpcode::kFloat64Cbrt:
      return MarkAsFloat64(node), VisitFloat64Cbrt(node);
    case IrOpcode::kFloat64Cos:
      return MarkAsFloat64(node), VisitFloat64Cos(node);
    case IrOpcode::kFloat64Cosh:
      return MarkAsFloat64(node), VisitFloat64Cosh(node);
    case IrOpcode::kFloat64Exp:
      return MarkAsFloat64(node), VisitFloat64Exp(node);
    case IrOpcode::kFloat64Expm1:
      return MarkAsFloat64(node), VisitFloat64Expm1(node);
    case IrOpcode::kFloat64Log:
      return MarkAsFloat64(node), VisitFloat64Log(node);
    case IrOpcode::kFloat64Log1p:
      return MarkAsFloat64(node), VisitFloat64Log1p(node);
    case IrOpcode::kFloat64Log10:
      return MarkAsFloat64(node), VisitFloat64Log10(node);
    case IrOpcode::kFloat64Log2:
      return MarkAsFloat64(node), VisitFloat64Log2(node);
    case IrOpcode::kFloat64Pow:
      return MarkAsFloat64(node), VisitFloat64Pow(node);
    case IrOpcode::kFloat64Sin:
      return MarkAsFloat64(node), VisitFloat64Sin(node);
    case IrOpcode::kFloat64Sinh:
      return MarkAsFloat64(node), VisitFloat64Sinh(node);
    case IrOpcode::kFloat64Sqrt:
      return MarkAsFloat64(node), VisitFloat64Sqrt(node);
    case IrOpcode::kFloat64Tan:
      return MarkAsFloat64(node), VisitFloat64Tan(node);
    case IrOpcode::kFloat64Tanh:
      return MarkAsFloat64(node), VisitFloat64Tanh(node);
    case IrOpcode::kFloat64Equal:
      return VisitFloat64Equal(node);
    case IrOpcode::kFloat64LessThan:
      return VisitFloat64LessThan(node);
    case IrOpcode::kFloat64LessThanOrEqual:
      return VisitFloat64LessThanOrEqual(node);
    case IrOpcode::kFloat32RoundDown:
      return MarkAsFloat32(node), VisitFloat32RoundDown(node);
    case IrOpcode::kFloat64RoundDown:
      return MarkAsFloat64(node), VisitFloat64RoundDown(node);
    case IrOpcode::kFloat32RoundUp:
      return MarkAsFloat32(node), VisitFloat32RoundUp(node);
    case IrOpcode::kFloat64RoundUp:
      return MarkAsFloat64(node), VisitFloat64RoundUp(node);
    case IrOpcode::kFloat32RoundTruncate:
      return MarkAsFloat32(node), VisitFloat32RoundTruncate(node);
    case IrOpcode::kFloat64RoundTruncate:
      return MarkAsFloat64(node), VisitFloat64RoundTruncate(node);
    case IrOpcode::kFloat64RoundTiesAway:
      return MarkAsFloat64(node), VisitFloat64RoundTiesAway(node);
    case IrOpcode::kFloat32RoundTiesEven:
      return MarkAsFloat32(node), VisitFloat32RoundTiesEven(node);
    case IrOpcode::kFloat64RoundTiesEven:
      return MarkAsFloat64(node), VisitFloat64RoundTiesEven(node);
    case IrOpcode::kFloat64ExtractLowWord32:
      return MarkAsWord32(node), VisitFloat64ExtractLowWord32(node);
    case IrOpcode::kFloat64ExtractHighWord32:
      return MarkAsWord32(node), VisitFloat64ExtractHighWord32(node);
    case IrOpcode::kFloat64InsertLowWord32:
      return MarkAsFloat64(node), VisitFloat64InsertLowWord32(node);
    case IrOpcode::kFloat64InsertHighWord32:
      return MarkAsFloat64(node), VisitFloat64InsertHighWord32(node);
    case IrOpcode::kTaggedPoisonOnSpeculation:
      return MarkAsReference(node), VisitTaggedPoisonOnSpeculation(node);
    case IrOpcode::kWord32PoisonOnSpeculation:
      return MarkAsWord32(node), VisitWord32PoisonOnSpeculation(node);
    case IrOpcode::kWord64PoisonOnSpeculation:
      return MarkAsWord64(node), VisitWord64PoisonOnSpeculation(node);
    case IrOpcode::kStackSlot:
      return VisitStackSlot(node);
    case IrOpcode::kLoadStackPointer:
      return VisitLoadStackPointer(node);
    case IrOpcode::kLoadFramePointer:
      return VisitLoadFramePointer(node);
    case IrOpcode::kLoadParentFramePointer:
      return VisitLoadParentFramePointer(node);
1671 case IrOpcode::kUnalignedLoad: {
1672 LoadRepresentation type = LoadRepresentationOf(node->op());
1673 MarkAsRepresentation(type.representation(), node);
1674 return VisitUnalignedLoad(node);
1676 case IrOpcode::kUnalignedStore:
1677 return VisitUnalignedStore(node);
1678 case IrOpcode::kInt32PairAdd:
1680 MarkPairProjectionsAsWord32(node);
1681 return VisitInt32PairAdd(node);
1682 case IrOpcode::kInt32PairSub:
1684 MarkPairProjectionsAsWord32(node);
1685 return VisitInt32PairSub(node);
1686 case IrOpcode::kInt32PairMul:
1688 MarkPairProjectionsAsWord32(node);
1689 return VisitInt32PairMul(node);
1690 case IrOpcode::kWord32PairShl:
1692 MarkPairProjectionsAsWord32(node);
1693 return VisitWord32PairShl(node);
1694 case IrOpcode::kWord32PairShr:
1696 MarkPairProjectionsAsWord32(node);
1697 return VisitWord32PairShr(node);
1698 case IrOpcode::kWord32PairSar:
1700 MarkPairProjectionsAsWord32(node);
1701 return VisitWord32PairSar(node);
    case IrOpcode::kWord32AtomicLoad: {
      LoadRepresentation type = LoadRepresentationOf(node->op());
      MarkAsRepresentation(type.representation(), node);
      return VisitWord32AtomicLoad(node);
    }
    case IrOpcode::kWord64AtomicLoad: {
      LoadRepresentation type = LoadRepresentationOf(node->op());
      MarkAsRepresentation(type.representation(), node);
      return VisitWord64AtomicLoad(node);
    }
    case IrOpcode::kWord32AtomicStore:
      return VisitWord32AtomicStore(node);
    case IrOpcode::kWord64AtomicStore:
      return VisitWord64AtomicStore(node);
    case IrOpcode::kWord32AtomicPairStore:
      return VisitWord32AtomicPairStore(node);
    case IrOpcode::kWord32AtomicPairLoad: {
      MarkAsWord32(node);
      MarkPairProjectionsAsWord32(node);
      return VisitWord32AtomicPairLoad(node);
    }
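// Each ATOMIC_CASE expansion below records the result representation of the
// node and then dispatches to the matching per-architecture visitor.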
#define ATOMIC_CASE(name, rep)                         \
  case IrOpcode::k##rep##Atomic##name: {               \
    MachineType type = AtomicOpType(node->op());       \
    MarkAsRepresentation(type.representation(), node); \
    return Visit##rep##Atomic##name(node);             \
  }
      ATOMIC_CASE(Add, Word32)
      ATOMIC_CASE(Add, Word64)
      ATOMIC_CASE(Sub, Word32)
      ATOMIC_CASE(Sub, Word64)
      ATOMIC_CASE(And, Word32)
      ATOMIC_CASE(And, Word64)
      ATOMIC_CASE(Or, Word32)
      ATOMIC_CASE(Or, Word64)
      ATOMIC_CASE(Xor, Word32)
      ATOMIC_CASE(Xor, Word64)
      ATOMIC_CASE(Exchange, Word32)
      ATOMIC_CASE(Exchange, Word64)
      ATOMIC_CASE(CompareExchange, Word32)
      ATOMIC_CASE(CompareExchange, Word64)
#undef ATOMIC_CASE
#define ATOMIC_CASE(name)                     \
  case IrOpcode::kWord32AtomicPair##name: {   \
    MarkAsWord32(node);                       \
    MarkPairProjectionsAsWord32(node);        \
    return VisitWord32AtomicPair##name(node); \
  }
      ATOMIC_CASE(Add)
      ATOMIC_CASE(Sub)
      ATOMIC_CASE(And)
      ATOMIC_CASE(Or)
      ATOMIC_CASE(Xor)
      ATOMIC_CASE(Exchange)
      ATOMIC_CASE(CompareExchange)
#undef ATOMIC_CASE
1758 case IrOpcode::kSpeculationFence:
1759 return VisitSpeculationFence(node);
    case IrOpcode::kProtectedLoad: {
      LoadRepresentation type = LoadRepresentationOf(node->op());
      MarkAsRepresentation(type.representation(), node);
      return VisitProtectedLoad(node);
    }
1765 case IrOpcode::kSignExtendWord8ToInt32:
1766 return MarkAsWord32(node), VisitSignExtendWord8ToInt32(node);
1767 case IrOpcode::kSignExtendWord16ToInt32:
1768 return MarkAsWord32(node), VisitSignExtendWord16ToInt32(node);
1769 case IrOpcode::kSignExtendWord8ToInt64:
1770 return MarkAsWord64(node), VisitSignExtendWord8ToInt64(node);
1771 case IrOpcode::kSignExtendWord16ToInt64:
1772 return MarkAsWord64(node), VisitSignExtendWord16ToInt64(node);
1773 case IrOpcode::kSignExtendWord32ToInt64:
1774 return MarkAsWord64(node), VisitSignExtendWord32ToInt64(node);
1775 case IrOpcode::kUnsafePointerAdd:
1776 MarkAsRepresentation(MachineType::PointerRepresentation(), node);
1777 return VisitUnsafePointerAdd(node);
1778 case IrOpcode::kF32x4Splat:
1779 return MarkAsSimd128(node), VisitF32x4Splat(node);
1780 case IrOpcode::kF32x4ExtractLane:
1781 return MarkAsFloat32(node), VisitF32x4ExtractLane(node);
1782 case IrOpcode::kF32x4ReplaceLane:
1783 return MarkAsSimd128(node), VisitF32x4ReplaceLane(node);
1784 case IrOpcode::kF32x4SConvertI32x4:
1785 return MarkAsSimd128(node), VisitF32x4SConvertI32x4(node);
1786 case IrOpcode::kF32x4UConvertI32x4:
1787 return MarkAsSimd128(node), VisitF32x4UConvertI32x4(node);
1788 case IrOpcode::kF32x4Abs:
1789 return MarkAsSimd128(node), VisitF32x4Abs(node);
1790 case IrOpcode::kF32x4Neg:
1791 return MarkAsSimd128(node), VisitF32x4Neg(node);
1792 case IrOpcode::kF32x4RecipApprox:
1793 return MarkAsSimd128(node), VisitF32x4RecipApprox(node);
1794 case IrOpcode::kF32x4RecipSqrtApprox:
1795 return MarkAsSimd128(node), VisitF32x4RecipSqrtApprox(node);
1796 case IrOpcode::kF32x4Add:
1797 return MarkAsSimd128(node), VisitF32x4Add(node);
1798 case IrOpcode::kF32x4AddHoriz:
1799 return MarkAsSimd128(node), VisitF32x4AddHoriz(node);
1800 case IrOpcode::kF32x4Sub:
1801 return MarkAsSimd128(node), VisitF32x4Sub(node);
1802 case IrOpcode::kF32x4Mul:
1803 return MarkAsSimd128(node), VisitF32x4Mul(node);
1804 case IrOpcode::kF32x4Min:
1805 return MarkAsSimd128(node), VisitF32x4Min(node);
1806 case IrOpcode::kF32x4Max:
1807 return MarkAsSimd128(node), VisitF32x4Max(node);
1808 case IrOpcode::kF32x4Eq:
1809 return MarkAsSimd128(node), VisitF32x4Eq(node);
1810 case IrOpcode::kF32x4Ne:
1811 return MarkAsSimd128(node), VisitF32x4Ne(node);
1812 case IrOpcode::kF32x4Lt:
1813 return MarkAsSimd128(node), VisitF32x4Lt(node);
1814 case IrOpcode::kF32x4Le:
1815 return MarkAsSimd128(node), VisitF32x4Le(node);
1816 case IrOpcode::kI32x4Splat:
1817 return MarkAsSimd128(node), VisitI32x4Splat(node);
1818 case IrOpcode::kI32x4ExtractLane:
1819 return MarkAsWord32(node), VisitI32x4ExtractLane(node);
1820 case IrOpcode::kI32x4ReplaceLane:
1821 return MarkAsSimd128(node), VisitI32x4ReplaceLane(node);
1822 case IrOpcode::kI32x4SConvertF32x4:
1823 return MarkAsSimd128(node), VisitI32x4SConvertF32x4(node);
1824 case IrOpcode::kI32x4SConvertI16x8Low:
1825 return MarkAsSimd128(node), VisitI32x4SConvertI16x8Low(node);
1826 case IrOpcode::kI32x4SConvertI16x8High:
1827 return MarkAsSimd128(node), VisitI32x4SConvertI16x8High(node);
1828 case IrOpcode::kI32x4Neg:
1829 return MarkAsSimd128(node), VisitI32x4Neg(node);
1830 case IrOpcode::kI32x4Shl:
1831 return MarkAsSimd128(node), VisitI32x4Shl(node);
1832 case IrOpcode::kI32x4ShrS:
1833 return MarkAsSimd128(node), VisitI32x4ShrS(node);
1834 case IrOpcode::kI32x4Add:
1835 return MarkAsSimd128(node), VisitI32x4Add(node);
1836 case IrOpcode::kI32x4AddHoriz:
1837 return MarkAsSimd128(node), VisitI32x4AddHoriz(node);
1838 case IrOpcode::kI32x4Sub:
1839 return MarkAsSimd128(node), VisitI32x4Sub(node);
1840 case IrOpcode::kI32x4Mul:
1841 return MarkAsSimd128(node), VisitI32x4Mul(node);
1842 case IrOpcode::kI32x4MinS:
1843 return MarkAsSimd128(node), VisitI32x4MinS(node);
1844 case IrOpcode::kI32x4MaxS:
1845 return MarkAsSimd128(node), VisitI32x4MaxS(node);
1846 case IrOpcode::kI32x4Eq:
1847 return MarkAsSimd128(node), VisitI32x4Eq(node);
1848 case IrOpcode::kI32x4Ne:
1849 return MarkAsSimd128(node), VisitI32x4Ne(node);
1850 case IrOpcode::kI32x4GtS:
1851 return MarkAsSimd128(node), VisitI32x4GtS(node);
1852 case IrOpcode::kI32x4GeS:
1853 return MarkAsSimd128(node), VisitI32x4GeS(node);
1854 case IrOpcode::kI32x4UConvertF32x4:
1855 return MarkAsSimd128(node), VisitI32x4UConvertF32x4(node);
1856 case IrOpcode::kI32x4UConvertI16x8Low:
1857 return MarkAsSimd128(node), VisitI32x4UConvertI16x8Low(node);
1858 case IrOpcode::kI32x4UConvertI16x8High:
1859 return MarkAsSimd128(node), VisitI32x4UConvertI16x8High(node);
1860 case IrOpcode::kI32x4ShrU:
1861 return MarkAsSimd128(node), VisitI32x4ShrU(node);
1862 case IrOpcode::kI32x4MinU:
1863 return MarkAsSimd128(node), VisitI32x4MinU(node);
1864 case IrOpcode::kI32x4MaxU:
1865 return MarkAsSimd128(node), VisitI32x4MaxU(node);
1866 case IrOpcode::kI32x4GtU:
1867 return MarkAsSimd128(node), VisitI32x4GtU(node);
1868 case IrOpcode::kI32x4GeU:
1869 return MarkAsSimd128(node), VisitI32x4GeU(node);
1870 case IrOpcode::kI16x8Splat:
1871 return MarkAsSimd128(node), VisitI16x8Splat(node);
1872 case IrOpcode::kI16x8ExtractLane:
1873 return MarkAsWord32(node), VisitI16x8ExtractLane(node);
1874 case IrOpcode::kI16x8ReplaceLane:
1875 return MarkAsSimd128(node), VisitI16x8ReplaceLane(node);
1876 case IrOpcode::kI16x8SConvertI8x16Low:
1877 return MarkAsSimd128(node), VisitI16x8SConvertI8x16Low(node);
1878 case IrOpcode::kI16x8SConvertI8x16High:
1879 return MarkAsSimd128(node), VisitI16x8SConvertI8x16High(node);
1880 case IrOpcode::kI16x8Neg:
1881 return MarkAsSimd128(node), VisitI16x8Neg(node);
1882 case IrOpcode::kI16x8Shl:
1883 return MarkAsSimd128(node), VisitI16x8Shl(node);
1884 case IrOpcode::kI16x8ShrS:
1885 return MarkAsSimd128(node), VisitI16x8ShrS(node);
1886 case IrOpcode::kI16x8SConvertI32x4:
1887 return MarkAsSimd128(node), VisitI16x8SConvertI32x4(node);
1888 case IrOpcode::kI16x8Add:
1889 return MarkAsSimd128(node), VisitI16x8Add(node);
1890 case IrOpcode::kI16x8AddSaturateS:
1891 return MarkAsSimd128(node), VisitI16x8AddSaturateS(node);
1892 case IrOpcode::kI16x8AddHoriz:
1893 return MarkAsSimd128(node), VisitI16x8AddHoriz(node);
1894 case IrOpcode::kI16x8Sub:
1895 return MarkAsSimd128(node), VisitI16x8Sub(node);
1896 case IrOpcode::kI16x8SubSaturateS:
1897 return MarkAsSimd128(node), VisitI16x8SubSaturateS(node);
1898 case IrOpcode::kI16x8Mul:
1899 return MarkAsSimd128(node), VisitI16x8Mul(node);
1900 case IrOpcode::kI16x8MinS:
1901 return MarkAsSimd128(node), VisitI16x8MinS(node);
1902 case IrOpcode::kI16x8MaxS:
1903 return MarkAsSimd128(node), VisitI16x8MaxS(node);
1904 case IrOpcode::kI16x8Eq:
1905 return MarkAsSimd128(node), VisitI16x8Eq(node);
1906 case IrOpcode::kI16x8Ne:
1907 return MarkAsSimd128(node), VisitI16x8Ne(node);
1908 case IrOpcode::kI16x8GtS:
1909 return MarkAsSimd128(node), VisitI16x8GtS(node);
1910 case IrOpcode::kI16x8GeS:
1911 return MarkAsSimd128(node), VisitI16x8GeS(node);
1912 case IrOpcode::kI16x8UConvertI8x16Low:
1913 return MarkAsSimd128(node), VisitI16x8UConvertI8x16Low(node);
1914 case IrOpcode::kI16x8UConvertI8x16High:
1915 return MarkAsSimd128(node), VisitI16x8UConvertI8x16High(node);
1916 case IrOpcode::kI16x8ShrU:
1917 return MarkAsSimd128(node), VisitI16x8ShrU(node);
1918 case IrOpcode::kI16x8UConvertI32x4:
1919 return MarkAsSimd128(node), VisitI16x8UConvertI32x4(node);
1920 case IrOpcode::kI16x8AddSaturateU:
1921 return MarkAsSimd128(node), VisitI16x8AddSaturateU(node);
1922 case IrOpcode::kI16x8SubSaturateU:
1923 return MarkAsSimd128(node), VisitI16x8SubSaturateU(node);
1924 case IrOpcode::kI16x8MinU:
1925 return MarkAsSimd128(node), VisitI16x8MinU(node);
1926 case IrOpcode::kI16x8MaxU:
1927 return MarkAsSimd128(node), VisitI16x8MaxU(node);
1928 case IrOpcode::kI16x8GtU:
1929 return MarkAsSimd128(node), VisitI16x8GtU(node);
1930 case IrOpcode::kI16x8GeU:
1931 return MarkAsSimd128(node), VisitI16x8GeU(node);
1932 case IrOpcode::kI8x16Splat:
1933 return MarkAsSimd128(node), VisitI8x16Splat(node);
1934 case IrOpcode::kI8x16ExtractLane:
1935 return MarkAsWord32(node), VisitI8x16ExtractLane(node);
1936 case IrOpcode::kI8x16ReplaceLane:
1937 return MarkAsSimd128(node), VisitI8x16ReplaceLane(node);
1938 case IrOpcode::kI8x16Neg:
1939 return MarkAsSimd128(node), VisitI8x16Neg(node);
1940 case IrOpcode::kI8x16Shl:
1941 return MarkAsSimd128(node), VisitI8x16Shl(node);
1942 case IrOpcode::kI8x16ShrS:
1943 return MarkAsSimd128(node), VisitI8x16ShrS(node);
1944 case IrOpcode::kI8x16SConvertI16x8:
1945 return MarkAsSimd128(node), VisitI8x16SConvertI16x8(node);
1946 case IrOpcode::kI8x16Add:
1947 return MarkAsSimd128(node), VisitI8x16Add(node);
1948 case IrOpcode::kI8x16AddSaturateS:
1949 return MarkAsSimd128(node), VisitI8x16AddSaturateS(node);
1950 case IrOpcode::kI8x16Sub:
1951 return MarkAsSimd128(node), VisitI8x16Sub(node);
1952 case IrOpcode::kI8x16SubSaturateS:
1953 return MarkAsSimd128(node), VisitI8x16SubSaturateS(node);
1954 case IrOpcode::kI8x16Mul:
1955 return MarkAsSimd128(node), VisitI8x16Mul(node);
1956 case IrOpcode::kI8x16MinS:
1957 return MarkAsSimd128(node), VisitI8x16MinS(node);
1958 case IrOpcode::kI8x16MaxS:
1959 return MarkAsSimd128(node), VisitI8x16MaxS(node);
1960 case IrOpcode::kI8x16Eq:
1961 return MarkAsSimd128(node), VisitI8x16Eq(node);
1962 case IrOpcode::kI8x16Ne:
1963 return MarkAsSimd128(node), VisitI8x16Ne(node);
1964 case IrOpcode::kI8x16GtS:
1965 return MarkAsSimd128(node), VisitI8x16GtS(node);
1966 case IrOpcode::kI8x16GeS:
1967 return MarkAsSimd128(node), VisitI8x16GeS(node);
1968 case IrOpcode::kI8x16ShrU:
1969 return MarkAsSimd128(node), VisitI8x16ShrU(node);
1970 case IrOpcode::kI8x16UConvertI16x8:
1971 return MarkAsSimd128(node), VisitI8x16UConvertI16x8(node);
1972 case IrOpcode::kI8x16AddSaturateU:
1973 return MarkAsSimd128(node), VisitI8x16AddSaturateU(node);
1974 case IrOpcode::kI8x16SubSaturateU:
1975 return MarkAsSimd128(node), VisitI8x16SubSaturateU(node);
1976 case IrOpcode::kI8x16MinU:
1977 return MarkAsSimd128(node), VisitI8x16MinU(node);
1978 case IrOpcode::kI8x16MaxU:
1979 return MarkAsSimd128(node), VisitI8x16MaxU(node);
1980 case IrOpcode::kI8x16GtU:
1981 return MarkAsSimd128(node), VisitI8x16GtU(node);
1982 case IrOpcode::kI8x16GeU:
1983 return MarkAsSimd128(node), VisitI8x16GeU(node);
1984 case IrOpcode::kS128Zero:
1985 return MarkAsSimd128(node), VisitS128Zero(node);
1986 case IrOpcode::kS128And:
1987 return MarkAsSimd128(node), VisitS128And(node);
1988 case IrOpcode::kS128Or:
1989 return MarkAsSimd128(node), VisitS128Or(node);
1990 case IrOpcode::kS128Xor:
1991 return MarkAsSimd128(node), VisitS128Xor(node);
1992 case IrOpcode::kS128Not:
1993 return MarkAsSimd128(node), VisitS128Not(node);
1994 case IrOpcode::kS128Select:
1995 return MarkAsSimd128(node), VisitS128Select(node);
1996 case IrOpcode::kS8x16Shuffle:
1997 return MarkAsSimd128(node), VisitS8x16Shuffle(node);
1998 case IrOpcode::kS1x4AnyTrue:
1999 return MarkAsWord32(node), VisitS1x4AnyTrue(node);
2000 case IrOpcode::kS1x4AllTrue:
2001 return MarkAsWord32(node), VisitS1x4AllTrue(node);
2002 case IrOpcode::kS1x8AnyTrue:
2003 return MarkAsWord32(node), VisitS1x8AnyTrue(node);
2004 case IrOpcode::kS1x8AllTrue:
2005 return MarkAsWord32(node), VisitS1x8AllTrue(node);
2006 case IrOpcode::kS1x16AnyTrue:
2007 return MarkAsWord32(node), VisitS1x16AnyTrue(node);
2008 case IrOpcode::kS1x16AllTrue:
2009 return MarkAsWord32(node), VisitS1x16AllTrue(node);
    default:
      FATAL("Unexpected operator #%d:%s @ node #%d", node->opcode(),
            node->op()->mnemonic(), node->id());
  }
}
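// Speculation poisoning: when a mitigation level other than kDontPoison is
// active, the selected value is combined with the speculation poison register
// via kArchWordPoisonOnSpeculation, so data loaded on a mis-speculated path
// cannot be leaked. With poisoning disabled the node is expected to simply
// forward its input.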
void InstructionSelector::EmitWordPoisonOnSpeculation(Node* node) {
  if (poisoning_level_ != PoisoningMitigationLevel::kDontPoison) {
    OperandGenerator g(this);
    Node* input_node = NodeProperties::GetValueInput(node, 0);
    InstructionOperand input = g.UseRegister(input_node);
    InstructionOperand output = g.DefineSameAsFirst(node);
    Emit(kArchWordPoisonOnSpeculation, output, input);
  } else {
    EmitIdentity(node);
  }
}
void InstructionSelector::VisitWord32PoisonOnSpeculation(Node* node) {
  EmitWordPoisonOnSpeculation(node);
}

void InstructionSelector::VisitWord64PoisonOnSpeculation(Node* node) {
  EmitWordPoisonOnSpeculation(node);
}

void InstructionSelector::VisitTaggedPoisonOnSpeculation(Node* node) {
  EmitWordPoisonOnSpeculation(node);
}
void InstructionSelector::VisitLoadStackPointer(Node* node) {
  OperandGenerator g(this);
  Emit(kArchStackPointer, g.DefineAsRegister(node));
}

void InstructionSelector::VisitLoadFramePointer(Node* node) {
  OperandGenerator g(this);
  Emit(kArchFramePointer, g.DefineAsRegister(node));
}

void InstructionSelector::VisitLoadParentFramePointer(Node* node) {
  OperandGenerator g(this);
  Emit(kArchParentFramePointer, g.DefineAsRegister(node));
}
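// The Float64 math visitors below all delegate to VisitFloat64Ieee754Unop /
// VisitFloat64Ieee754Binop with the corresponding kIeee754Float64* opcode;
// how that opcode is lowered (typically a call into the ieee754 runtime) is
// architecture specific.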
void InstructionSelector::VisitFloat64Acos(Node* node) {
  VisitFloat64Ieee754Unop(node, kIeee754Float64Acos);
}

void InstructionSelector::VisitFloat64Acosh(Node* node) {
  VisitFloat64Ieee754Unop(node, kIeee754Float64Acosh);
}

void InstructionSelector::VisitFloat64Asin(Node* node) {
  VisitFloat64Ieee754Unop(node, kIeee754Float64Asin);
}

void InstructionSelector::VisitFloat64Asinh(Node* node) {
  VisitFloat64Ieee754Unop(node, kIeee754Float64Asinh);
}

void InstructionSelector::VisitFloat64Atan(Node* node) {
  VisitFloat64Ieee754Unop(node, kIeee754Float64Atan);
}

void InstructionSelector::VisitFloat64Atanh(Node* node) {
  VisitFloat64Ieee754Unop(node, kIeee754Float64Atanh);
}

void InstructionSelector::VisitFloat64Atan2(Node* node) {
  VisitFloat64Ieee754Binop(node, kIeee754Float64Atan2);
}

void InstructionSelector::VisitFloat64Cbrt(Node* node) {
  VisitFloat64Ieee754Unop(node, kIeee754Float64Cbrt);
}

void InstructionSelector::VisitFloat64Cos(Node* node) {
  VisitFloat64Ieee754Unop(node, kIeee754Float64Cos);
}

void InstructionSelector::VisitFloat64Cosh(Node* node) {
  VisitFloat64Ieee754Unop(node, kIeee754Float64Cosh);
}

void InstructionSelector::VisitFloat64Exp(Node* node) {
  VisitFloat64Ieee754Unop(node, kIeee754Float64Exp);
}

void InstructionSelector::VisitFloat64Expm1(Node* node) {
  VisitFloat64Ieee754Unop(node, kIeee754Float64Expm1);
}

void InstructionSelector::VisitFloat64Log(Node* node) {
  VisitFloat64Ieee754Unop(node, kIeee754Float64Log);
}

void InstructionSelector::VisitFloat64Log1p(Node* node) {
  VisitFloat64Ieee754Unop(node, kIeee754Float64Log1p);
}

void InstructionSelector::VisitFloat64Log2(Node* node) {
  VisitFloat64Ieee754Unop(node, kIeee754Float64Log2);
}

void InstructionSelector::VisitFloat64Log10(Node* node) {
  VisitFloat64Ieee754Unop(node, kIeee754Float64Log10);
}

void InstructionSelector::VisitFloat64Pow(Node* node) {
  VisitFloat64Ieee754Binop(node, kIeee754Float64Pow);
}

void InstructionSelector::VisitFloat64Sin(Node* node) {
  VisitFloat64Ieee754Unop(node, kIeee754Float64Sin);
}

void InstructionSelector::VisitFloat64Sinh(Node* node) {
  VisitFloat64Ieee754Unop(node, kIeee754Float64Sinh);
}

void InstructionSelector::VisitFloat64Tan(Node* node) {
  VisitFloat64Ieee754Unop(node, kIeee754Float64Tan);
}

void InstructionSelector::VisitFloat64Tanh(Node* node) {
  VisitFloat64Ieee754Unop(node, kIeee754Float64Tanh);
}
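// Emits a jump-table switch. Operand layout produced below: inputs[0] is the
// index operand supplied by the caller, inputs[1] is the default label, and
// inputs[2 + (value - min_value)] holds the label for each case; slots that
// are not overwritten keep the default label, so holes in the value range
// fall through to the default block.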
void InstructionSelector::EmitTableSwitch(const SwitchInfo& sw,
                                          InstructionOperand& index_operand) {
  OperandGenerator g(this);
  size_t input_count = 2 + sw.value_range();
  DCHECK_LE(sw.value_range(), std::numeric_limits<size_t>::max() - 2);
  auto* inputs = zone()->NewArray<InstructionOperand>(input_count);
  inputs[0] = index_operand;
  InstructionOperand default_operand = g.Label(sw.default_branch());
  std::fill(&inputs[1], &inputs[input_count], default_operand);
  for (const CaseInfo& c : sw.CasesUnsorted()) {
    size_t value = c.value - sw.min_value();
    DCHECK_LE(0u, value);
    DCHECK_LT(value + 2, input_count);
    inputs[value + 2] = g.Label(c.branch);
  }
  Emit(kArchTableSwitch, 0, nullptr, input_count, inputs, 0, nullptr);
}
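// Emits a linear lookup switch: inputs[0] is the value to test, inputs[1] the
// default label, followed by one (immediate value, label) pair per case in
// the original case order.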
void InstructionSelector::EmitLookupSwitch(const SwitchInfo& sw,
                                           InstructionOperand& value_operand) {
  OperandGenerator g(this);
  std::vector<CaseInfo> cases = sw.CasesSortedByOriginalOrder();
  size_t input_count = 2 + sw.case_count() * 2;
  DCHECK_LE(sw.case_count(), (std::numeric_limits<size_t>::max() - 2) / 2);
  auto* inputs = zone()->NewArray<InstructionOperand>(input_count);
  inputs[0] = value_operand;
  inputs[1] = g.Label(sw.default_branch());
  for (size_t index = 0; index < cases.size(); ++index) {
    const CaseInfo& c = cases[index];
    inputs[index * 2 + 2 + 0] = g.TempImmediate(c.value);
    inputs[index * 2 + 2 + 1] = g.Label(c.branch);
  }
  Emit(kArchLookupSwitch, 0, nullptr, input_count, inputs, 0, nullptr);
}
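// Same operand layout as the lookup switch, but with the cases sorted by
// value so the code generator can emit a binary search over them.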
void InstructionSelector::EmitBinarySearchSwitch(
    const SwitchInfo& sw, InstructionOperand& value_operand) {
  OperandGenerator g(this);
  size_t input_count = 2 + sw.case_count() * 2;
  DCHECK_LE(sw.case_count(), (std::numeric_limits<size_t>::max() - 2) / 2);
  auto* inputs = zone()->NewArray<InstructionOperand>(input_count);
  inputs[0] = value_operand;
  inputs[1] = g.Label(sw.default_branch());
  std::vector<CaseInfo> cases = sw.CasesSortedByValue();
  std::stable_sort(cases.begin(), cases.end(),
                   [](CaseInfo a, CaseInfo b) { return a.value < b.value; });
  for (size_t index = 0; index < cases.size(); ++index) {
    const CaseInfo& c = cases[index];
    inputs[index * 2 + 2 + 0] = g.TempImmediate(c.value);
    inputs[index * 2 + 2 + 1] = g.Label(c.branch);
  }
  Emit(kArchBinarySearchSwitch, 0, nullptr, input_count, inputs, 0, nullptr);
}
void InstructionSelector::VisitBitcastTaggedToWord(Node* node) {
  EmitIdentity(node);
}

void InstructionSelector::VisitBitcastWordToTagged(Node* node) {
  OperandGenerator g(this);
  Emit(kArchNop, g.DefineSameAsFirst(node), g.Use(node->InputAt(0)));
}
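// On 32-bit targets there are no single 64-bit word operations, so the
// 64-bit visitors below are stubbed out with UNIMPLEMENTED(); they should
// never be reached when selecting instructions for such a target.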
#if V8_TARGET_ARCH_32_BIT
void InstructionSelector::VisitWord64And(Node* node) { UNIMPLEMENTED(); }
void InstructionSelector::VisitWord64Or(Node* node) { UNIMPLEMENTED(); }
void InstructionSelector::VisitWord64Xor(Node* node) { UNIMPLEMENTED(); }
void InstructionSelector::VisitWord64Shl(Node* node) { UNIMPLEMENTED(); }
void InstructionSelector::VisitWord64Shr(Node* node) { UNIMPLEMENTED(); }
void InstructionSelector::VisitWord64Sar(Node* node) { UNIMPLEMENTED(); }
void InstructionSelector::VisitWord64Ror(Node* node) { UNIMPLEMENTED(); }
void InstructionSelector::VisitWord64Clz(Node* node) { UNIMPLEMENTED(); }
void InstructionSelector::VisitWord64Ctz(Node* node) { UNIMPLEMENTED(); }
void InstructionSelector::VisitWord64ReverseBits(Node* node) {
  UNIMPLEMENTED();
}
void InstructionSelector::VisitWord64Popcnt(Node* node) { UNIMPLEMENTED(); }
void InstructionSelector::VisitWord64Equal(Node* node) { UNIMPLEMENTED(); }
void InstructionSelector::VisitInt64Add(Node* node) { UNIMPLEMENTED(); }
void InstructionSelector::VisitInt64AddWithOverflow(Node* node) {
  UNIMPLEMENTED();
}
void InstructionSelector::VisitInt64Sub(Node* node) { UNIMPLEMENTED(); }
void InstructionSelector::VisitInt64SubWithOverflow(Node* node) {
  UNIMPLEMENTED();
}
void InstructionSelector::VisitInt64Mul(Node* node) { UNIMPLEMENTED(); }
void InstructionSelector::VisitInt64Div(Node* node) { UNIMPLEMENTED(); }
void InstructionSelector::VisitInt64LessThan(Node* node) { UNIMPLEMENTED(); }
void InstructionSelector::VisitInt64LessThanOrEqual(Node* node) {
  UNIMPLEMENTED();
}
void InstructionSelector::VisitUint64Div(Node* node) { UNIMPLEMENTED(); }
void InstructionSelector::VisitInt64Mod(Node* node) { UNIMPLEMENTED(); }
void InstructionSelector::VisitUint64LessThan(Node* node) { UNIMPLEMENTED(); }
void InstructionSelector::VisitUint64LessThanOrEqual(Node* node) {
  UNIMPLEMENTED();
}
void InstructionSelector::VisitUint64Mod(Node* node) { UNIMPLEMENTED(); }
void InstructionSelector::VisitChangeInt32ToInt64(Node* node) {
  UNIMPLEMENTED();
}
void InstructionSelector::VisitChangeInt64ToFloat64(Node* node) {
  UNIMPLEMENTED();
}
void InstructionSelector::VisitChangeUint32ToUint64(Node* node) {
  UNIMPLEMENTED();
}
void InstructionSelector::VisitChangeFloat64ToInt64(Node* node) {
  UNIMPLEMENTED();
}
void InstructionSelector::VisitChangeFloat64ToUint64(Node* node) {
  UNIMPLEMENTED();
}
void InstructionSelector::VisitTruncateFloat64ToInt64(Node* node) {
  UNIMPLEMENTED();
}
void InstructionSelector::VisitTryTruncateFloat32ToInt64(Node* node) {
  UNIMPLEMENTED();
}
void InstructionSelector::VisitTryTruncateFloat64ToInt64(Node* node) {
  UNIMPLEMENTED();
}
void InstructionSelector::VisitTryTruncateFloat32ToUint64(Node* node) {
  UNIMPLEMENTED();
}
void InstructionSelector::VisitTryTruncateFloat64ToUint64(Node* node) {
  UNIMPLEMENTED();
}
void InstructionSelector::VisitTruncateInt64ToInt32(Node* node) {
  UNIMPLEMENTED();
}
void InstructionSelector::VisitRoundInt64ToFloat32(Node* node) {
  UNIMPLEMENTED();
}
void InstructionSelector::VisitRoundInt64ToFloat64(Node* node) {
  UNIMPLEMENTED();
}
void InstructionSelector::VisitRoundUint64ToFloat32(Node* node) {
  UNIMPLEMENTED();
}
void InstructionSelector::VisitRoundUint64ToFloat64(Node* node) {
  UNIMPLEMENTED();
}
void InstructionSelector::VisitBitcastFloat64ToInt64(Node* node) {
  UNIMPLEMENTED();
}
void InstructionSelector::VisitBitcastInt64ToFloat64(Node* node) {
  UNIMPLEMENTED();
}
void InstructionSelector::VisitSignExtendWord8ToInt64(Node* node) {
  UNIMPLEMENTED();
}
void InstructionSelector::VisitSignExtendWord16ToInt64(Node* node) {
  UNIMPLEMENTED();
}
void InstructionSelector::VisitSignExtendWord32ToInt64(Node* node) {
  UNIMPLEMENTED();
}
#endif  // V8_TARGET_ARCH_32_BIT

#if V8_TARGET_ARCH_64_BIT
void InstructionSelector::VisitInt32PairAdd(Node* node) { UNIMPLEMENTED(); }
void InstructionSelector::VisitInt32PairSub(Node* node) { UNIMPLEMENTED(); }
void InstructionSelector::VisitInt32PairMul(Node* node) { UNIMPLEMENTED(); }
void InstructionSelector::VisitWord32PairShl(Node* node) { UNIMPLEMENTED(); }
void InstructionSelector::VisitWord32PairShr(Node* node) { UNIMPLEMENTED(); }
void InstructionSelector::VisitWord32PairSar(Node* node) { UNIMPLEMENTED(); }
#endif  // V8_TARGET_ARCH_64_BIT

#if !V8_TARGET_ARCH_IA32 && !V8_TARGET_ARCH_ARM && !V8_TARGET_ARCH_MIPS
void InstructionSelector::VisitWord32AtomicPairLoad(Node* node) {
  UNIMPLEMENTED();
}
void InstructionSelector::VisitWord32AtomicPairStore(Node* node) {
  UNIMPLEMENTED();
}
void InstructionSelector::VisitWord32AtomicPairAdd(Node* node) {
  UNIMPLEMENTED();
}
void InstructionSelector::VisitWord32AtomicPairSub(Node* node) {
  UNIMPLEMENTED();
}
void InstructionSelector::VisitWord32AtomicPairAnd(Node* node) {
  UNIMPLEMENTED();
}
void InstructionSelector::VisitWord32AtomicPairOr(Node* node) {
  UNIMPLEMENTED();
}
void InstructionSelector::VisitWord32AtomicPairXor(Node* node) {
  UNIMPLEMENTED();
}
void InstructionSelector::VisitWord32AtomicPairExchange(Node* node) {
  UNIMPLEMENTED();
}
void InstructionSelector::VisitWord32AtomicPairCompareExchange(Node* node) {
  UNIMPLEMENTED();
}
#endif  // !V8_TARGET_ARCH_IA32 && !V8_TARGET_ARCH_ARM && !V8_TARGET_ARCH_MIPS

#if !V8_TARGET_ARCH_X64 && !V8_TARGET_ARCH_ARM64 && !V8_TARGET_ARCH_MIPS64 && \
    !V8_TARGET_ARCH_S390 && !V8_TARGET_ARCH_PPC
void InstructionSelector::VisitWord64AtomicLoad(Node* node) { UNIMPLEMENTED(); }
void InstructionSelector::VisitWord64AtomicStore(Node* node) {
  UNIMPLEMENTED();
}
void InstructionSelector::VisitWord64AtomicAdd(Node* node) { UNIMPLEMENTED(); }
void InstructionSelector::VisitWord64AtomicSub(Node* node) { UNIMPLEMENTED(); }
void InstructionSelector::VisitWord64AtomicAnd(Node* node) { UNIMPLEMENTED(); }
void InstructionSelector::VisitWord64AtomicOr(Node* node) { UNIMPLEMENTED(); }
void InstructionSelector::VisitWord64AtomicXor(Node* node) { UNIMPLEMENTED(); }
void InstructionSelector::VisitWord64AtomicExchange(Node* node) {
  UNIMPLEMENTED();
}
void InstructionSelector::VisitWord64AtomicCompareExchange(Node* node) {
  UNIMPLEMENTED();
}
#endif  // !V8_TARGET_ARCH_X64 && !V8_TARGET_ARCH_ARM64 && !V8_TARGET_ARCH_PPC

#if !V8_TARGET_ARCH_ARM && !V8_TARGET_ARCH_ARM64 && !V8_TARGET_ARCH_MIPS && \
    !V8_TARGET_ARCH_MIPS64 && !V8_TARGET_ARCH_IA32
void InstructionSelector::VisitS8x16Shuffle(Node* node) { UNIMPLEMENTED(); }
#endif  // !V8_TARGET_ARCH_ARM && !V8_TARGET_ARCH_ARM64 && !V8_TARGET_ARCH_MIPS

void InstructionSelector::VisitFinishRegion(Node* node) { EmitIdentity(node); }
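// A parameter node does not generate code of its own: it is emitted as a nop
// whose output is constrained to the location (register and/or stack slot)
// that the linkage assigned to that parameter index.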
void InstructionSelector::VisitParameter(Node* node) {
  OperandGenerator g(this);
  int index = ParameterIndexOf(node->op());
  InstructionOperand op =
      linkage()->ParameterHasSecondaryLocation(index)
          ? g.DefineAsDualLocation(
                node, linkage()->GetParameterLocation(index),
                linkage()->GetParameterSecondaryLocation(index))
          : g.DefineAsLocation(node, linkage()->GetParameterLocation(index));
  Emit(kArchNop, op);
}

LinkageLocation ExceptionLocation() {
  return LinkageLocation::ForRegister(kReturnRegister0.code(),
                                      MachineType::IntPtr());
}
void InstructionSelector::VisitIfException(Node* node) {
  OperandGenerator g(this);
  DCHECK_EQ(IrOpcode::kCall, node->InputAt(1)->opcode());
  Emit(kArchNop, g.DefineAsLocation(node, ExceptionLocation()));
}

void InstructionSelector::VisitOsrValue(Node* node) {
  OperandGenerator g(this);
  int index = OsrValueIndexOf(node->op());
  Emit(kArchNop,
       g.DefineAsLocation(node, linkage()->GetOsrValueLocation(index)));
}
void InstructionSelector::VisitPhi(Node* node) {
  const int input_count = node->op()->ValueInputCount();
  DCHECK_EQ(input_count, current_block_->PredecessorCount());
  PhiInstruction* phi = new (instruction_zone())
      PhiInstruction(instruction_zone(), GetVirtualRegister(node),
                     static_cast<size_t>(input_count));
  sequence()
      ->InstructionBlockAt(RpoNumber::FromInt(current_block_->rpo_number()))
      ->AddPhi(phi);
  for (int i = 0; i < input_count; ++i) {
    Node* const input = node->InputAt(i);
    MarkAsUsed(input);
    phi->SetInput(static_cast<size_t>(i), GetVirtualRegister(input));
  }
}
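// Projections select one output of a multi-output node (overflow arithmetic,
// TryTruncate*, and the Word32 pair operations). Projection index 0 is the
// value output and is aliased onto the producing instruction with a nop;
// index 1 (the overflow/secondary output) needs no code here, only a DCHECK
// that the index is valid.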
void InstructionSelector::VisitProjection(Node* node) {
  OperandGenerator g(this);
  Node* value = node->InputAt(0);
  switch (value->opcode()) {
    case IrOpcode::kInt32AddWithOverflow:
    case IrOpcode::kInt32SubWithOverflow:
    case IrOpcode::kInt32MulWithOverflow:
    case IrOpcode::kInt64AddWithOverflow:
    case IrOpcode::kInt64SubWithOverflow:
    case IrOpcode::kTryTruncateFloat32ToInt64:
    case IrOpcode::kTryTruncateFloat64ToInt64:
    case IrOpcode::kTryTruncateFloat32ToUint64:
    case IrOpcode::kTryTruncateFloat64ToUint64:
    case IrOpcode::kInt32PairAdd:
    case IrOpcode::kInt32PairSub:
    case IrOpcode::kInt32PairMul:
    case IrOpcode::kWord32PairShl:
    case IrOpcode::kWord32PairShr:
    case IrOpcode::kWord32PairSar:
    case IrOpcode::kInt32AbsWithOverflow:
    case IrOpcode::kInt64AbsWithOverflow:
      if (ProjectionIndexOf(node->op()) == 0u) {
        Emit(kArchNop, g.DefineSameAsFirst(node), g.Use(value));
      } else {
        DCHECK_EQ(1u, ProjectionIndexOf(node->op()));
      }
      break;
    default:
      break;
  }
}
void InstructionSelector::VisitConstant(Node* node) {
  OperandGenerator g(this);
  Emit(kArchNop, g.DefineAsConstant(node));
}
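// Call lowering: build a frame state descriptor if the call needs one, fill a
// CallBuffer with the output/input operands dictated by the call descriptor,
// push the arguments, optionally attach the exception handler label, choose
// the architecture opcode from the descriptor kind, and emit the instruction
// marked as a call.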
void InstructionSelector::VisitCall(Node* node, BasicBlock* handler) {
  OperandGenerator g(this);
  auto call_descriptor = CallDescriptorOf(node->op());

  FrameStateDescriptor* frame_state_descriptor = nullptr;
  if (call_descriptor->NeedsFrameState()) {
    frame_state_descriptor = GetFrameStateDescriptor(
        node->InputAt(static_cast<int>(call_descriptor->InputCount())));
  }

  CallBuffer buffer(zone(), call_descriptor, frame_state_descriptor);
  CallDescriptor::Flags flags = call_descriptor->flags();
  CallBufferFlags call_buffer_flags(kCallCodeImmediate | kCallAddressImmediate);
  if (flags & CallDescriptor::kAllowCallThroughSlot) {
    call_buffer_flags |= kAllowCallThroughSlot;
#ifndef V8_TARGET_ARCH_32_BIT
    // Calling through a slot is only expected on 32-bit targets.
    UNREACHABLE();
#endif  // V8_TARGET_ARCH_32_BIT
  }
  InitializeCallBuffer(node, &buffer, call_buffer_flags, false);

  EmitPrepareArguments(&(buffer.pushed_nodes), call_descriptor, node);
  // Pass the exception handler's label as an extra argument, if present.
  if (handler) {
    DCHECK_EQ(IrOpcode::kIfException, handler->front()->opcode());
    flags |= CallDescriptor::kHasExceptionHandler;
    buffer.instruction_args.push_back(g.Label(handler));
  }
  // Select the appropriate opcode based on the call type.
  InstructionCode opcode = kArchNop;
  switch (call_descriptor->kind()) {
    case CallDescriptor::kCallAddress:
      opcode = kArchCallCFunction | MiscField::encode(static_cast<int>(
                                        call_descriptor->ParameterCount()));
      break;
    case CallDescriptor::kCallCodeObject:
      opcode = kArchCallCodeObject | MiscField::encode(flags);
      break;
    case CallDescriptor::kCallJSFunction:
      opcode = kArchCallJSFunction | MiscField::encode(flags);
      break;
    case CallDescriptor::kCallWasmFunction:
    case CallDescriptor::kCallWasmImportWrapper:
      opcode = kArchCallWasmFunction | MiscField::encode(flags);
      break;
  }
  size_t const output_count = buffer.outputs.size();
  auto* outputs = output_count ? &buffer.outputs.front() : nullptr;
  Instruction* call_instr =
      Emit(opcode, output_count, outputs, buffer.instruction_args.size(),
           &buffer.instruction_args.front());
  if (instruction_selection_failed()) return;
  call_instr->MarkAsCall();

  EmitPrepareResults(&(buffer.output_nodes), call_descriptor, node);
}
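// Calls that clobber caller-saved registers are bracketed with explicit
// kArchSaveCallerRegisters / kArchRestoreCallerRegisters instructions, using
// the FP-save mode recorded in the call descriptor.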
void InstructionSelector::VisitCallWithCallerSavedRegisters(
    Node* node, BasicBlock* handler) {
  OperandGenerator g(this);
  const auto fp_mode = CallDescriptorOf(node->op())->get_save_fp_mode();
  Emit(kArchSaveCallerRegisters | MiscField::encode(static_cast<int>(fp_mode)),
       g.NoOutput());
  VisitCall(node, handler);
  Emit(kArchRestoreCallerRegisters |
           MiscField::encode(static_cast<int>(fp_mode)),
       g.NoOutput());
}
void InstructionSelector::VisitTailCall(Node* node) {
  OperandGenerator g(this);
  auto call_descriptor = CallDescriptorOf(node->op());

  CallDescriptor* caller = linkage()->GetIncomingDescriptor();
  DCHECK(caller->CanTailCall(node));
  const CallDescriptor* callee = CallDescriptorOf(node->op());
  int stack_param_delta = callee->GetStackParameterDelta(caller);
  CallBuffer buffer(zone(), call_descriptor, nullptr);

  // Compute InstructionOperands for inputs and outputs.
  CallBufferFlags flags(kCallCodeImmediate | kCallTail);
  if (IsTailCallAddressImmediate()) {
    flags |= kCallAddressImmediate;
  }
  if (callee->flags() & CallDescriptor::kFixedTargetRegister) {
    flags |= kCallFixedTargetRegister;
  }
  DCHECK_EQ(callee->flags() & CallDescriptor::kAllowCallThroughSlot, 0);
  InitializeCallBuffer(node, &buffer, flags, true, stack_param_delta);

  // Select the appropriate opcode based on the call type.
  InstructionCode opcode;
  InstructionOperandVector temps(zone());
  if (linkage()->GetIncomingDescriptor()->IsJSFunctionCall()) {
    switch (call_descriptor->kind()) {
      case CallDescriptor::kCallCodeObject:
        opcode = kArchTailCallCodeObjectFromJSFunction;
        break;
      default:
        UNREACHABLE();
        return;
    }
    int temps_count = GetTempsCountForTailCallFromJSFunction();
    for (int i = 0; i < temps_count; i++) {
      temps.push_back(g.TempRegister());
    }
  } else {
    switch (call_descriptor->kind()) {
      case CallDescriptor::kCallCodeObject:
        opcode = kArchTailCallCodeObject;
        break;
      case CallDescriptor::kCallAddress:
        opcode = kArchTailCallAddress;
        break;
      case CallDescriptor::kCallWasmFunction:
        opcode = kArchTailCallWasm;
        break;
      default:
        UNREACHABLE();
        return;
    }
  }
  opcode |= MiscField::encode(call_descriptor->flags());

  Emit(kArchPrepareTailCall, g.NoOutput());

  int optional_padding_slot = callee->GetFirstUnusedStackSlot();
  buffer.instruction_args.push_back(g.TempImmediate(optional_padding_slot));

  int first_unused_stack_slot =
      (V8_TARGET_ARCH_STORES_RETURN_ADDRESS_ON_STACK ? true : false) +
      stack_param_delta;
  buffer.instruction_args.push_back(g.TempImmediate(first_unused_stack_slot));

  // Emit the tailcall instruction.
  Emit(opcode, 0, nullptr, buffer.instruction_args.size(),
       &buffer.instruction_args.front(), temps.size(),
       temps.empty() ? nullptr : &temps.front());
}
void InstructionSelector::VisitGoto(BasicBlock* target) {
  OperandGenerator g(this);
  Emit(kArchJmp, g.NoOutput(), g.Label(target));
}
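// The return instruction takes the pop count as its first operand (as an
// immediate when it is a constant) followed by the return values at the
// locations assigned by the linkage.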
void InstructionSelector::VisitReturn(Node* ret) {
  OperandGenerator g(this);
  const int input_count = linkage()->GetIncomingDescriptor()->ReturnCount() == 0
                              ? 1
                              : ret->op()->ValueInputCount();
  DCHECK_GE(input_count, 1);
  auto value_locations = zone()->NewArray<InstructionOperand>(input_count);
  Node* pop_count = ret->InputAt(0);
  value_locations[0] = (pop_count->opcode() == IrOpcode::kInt32Constant ||
                        pop_count->opcode() == IrOpcode::kInt64Constant)
                           ? g.UseImmediate(pop_count)
                           : g.UseRegister(pop_count);
  for (int i = 1; i < input_count; ++i) {
    value_locations[i] =
        g.UseLocation(ret->InputAt(i), linkage()->GetReturnLocation(i - 1));
  }
  Emit(kArchRet, 0, nullptr, input_count, value_locations);
}
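// Branches, deoptimization checks and traps are all funneled through
// VisitWordCompareZero with a FlagsContinuation describing what to do with
// the condition; the *AndPoison variants additionally update the speculation
// poison register when poisoning is enabled for the check.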
void InstructionSelector::VisitBranch(Node* branch, BasicBlock* tbranch,
                                      BasicBlock* fbranch) {
  if (NeedsPoisoning(IsSafetyCheckOf(branch->op()))) {
    FlagsContinuation cont =
        FlagsContinuation::ForBranchAndPoison(kNotEqual, tbranch, fbranch);
    VisitWordCompareZero(branch, branch->InputAt(0), &cont);
  } else {
    FlagsContinuation cont =
        FlagsContinuation::ForBranch(kNotEqual, tbranch, fbranch);
    VisitWordCompareZero(branch, branch->InputAt(0), &cont);
  }
}
void InstructionSelector::VisitDeoptimizeIf(Node* node) {
  DeoptimizeParameters p = DeoptimizeParametersOf(node->op());
  if (NeedsPoisoning(p.is_safety_check())) {
    FlagsContinuation cont = FlagsContinuation::ForDeoptimizeAndPoison(
        kNotEqual, p.kind(), p.reason(), p.feedback(), node->InputAt(1));
    VisitWordCompareZero(node, node->InputAt(0), &cont);
  } else {
    FlagsContinuation cont = FlagsContinuation::ForDeoptimize(
        kNotEqual, p.kind(), p.reason(), p.feedback(), node->InputAt(1));
    VisitWordCompareZero(node, node->InputAt(0), &cont);
  }
}
void InstructionSelector::VisitDeoptimizeUnless(Node* node) {
  DeoptimizeParameters p = DeoptimizeParametersOf(node->op());
  if (NeedsPoisoning(p.is_safety_check())) {
    FlagsContinuation cont = FlagsContinuation::ForDeoptimizeAndPoison(
        kEqual, p.kind(), p.reason(), p.feedback(), node->InputAt(1));
    VisitWordCompareZero(node, node->InputAt(0), &cont);
  } else {
    FlagsContinuation cont = FlagsContinuation::ForDeoptimize(
        kEqual, p.kind(), p.reason(), p.feedback(), node->InputAt(1));
    VisitWordCompareZero(node, node->InputAt(0), &cont);
  }
}
void InstructionSelector::VisitTrapIf(Node* node, TrapId trap_id) {
  FlagsContinuation cont =
      FlagsContinuation::ForTrap(kNotEqual, trap_id, node->InputAt(1));
  VisitWordCompareZero(node, node->InputAt(0), &cont);
}

void InstructionSelector::VisitTrapUnless(Node* node, TrapId trap_id) {
  FlagsContinuation cont =
      FlagsContinuation::ForTrap(kEqual, trap_id, node->InputAt(1));
  VisitWordCompareZero(node, node->InputAt(0), &cont);
}
void InstructionSelector::EmitIdentity(Node* node) {
  OperandGenerator g(this);
  MarkAsUsed(node->InputAt(0));
  SetRename(node, node->InputAt(0));
}
void InstructionSelector::VisitDeoptimize(DeoptimizeKind kind,
                                          DeoptimizeReason reason,
                                          VectorSlotPair const& feedback,
                                          Node* value) {
  EmitDeoptimize(kArchDeoptimize, 0, nullptr, 0, nullptr, kind, reason,
                 feedback, value);
}
void InstructionSelector::VisitThrow(Node* node) {
  OperandGenerator g(this);
  Emit(kArchThrowTerminator, g.NoOutput());
}

void InstructionSelector::VisitDebugBreak(Node* node) {
  OperandGenerator g(this);
  Emit(kArchDebugBreak, g.NoOutput());
}

void InstructionSelector::VisitUnreachable(Node* node) {
  OperandGenerator g(this);
  Emit(kArchDebugBreak, g.NoOutput());
}

void InstructionSelector::VisitDeadValue(Node* node) {
  OperandGenerator g(this);
  MarkAsRepresentation(DeadValueRepresentationOf(node->op()), node);
  Emit(kArchDebugBreak, g.DefineAsConstant(node));
}

void InstructionSelector::VisitComment(Node* node) {
  OperandGenerator g(this);
  InstructionOperand operand(g.UseImmediate(node));
  Emit(kArchComment, 0, nullptr, 1, &operand);
}
void InstructionSelector::VisitUnsafePointerAdd(Node* node) {
#if V8_TARGET_ARCH_64_BIT
  VisitInt64Add(node);
#else   // V8_TARGET_ARCH_64_BIT
  VisitInt32Add(node);
#endif  // V8_TARGET_ARCH_64_BIT
}

void InstructionSelector::VisitRetain(Node* node) {
  OperandGenerator g(this);
  Emit(kArchNop, g.NoOutput(), g.UseAny(node->InputAt(0)));
}
bool InstructionSelector::CanProduceSignalingNaN(Node* node) {
  if (node->opcode() == IrOpcode::kFloat64Add ||
      node->opcode() == IrOpcode::kFloat64Sub ||
      node->opcode() == IrOpcode::kFloat64Mul) {
    return false;
  }
  return true;
}
FrameStateDescriptor* InstructionSelector::GetFrameStateDescriptor(
    Node* state) {
  DCHECK_EQ(IrOpcode::kFrameState, state->opcode());
  DCHECK_EQ(kFrameStateInputCount, state->InputCount());
  FrameStateInfo state_info = FrameStateInfoOf(state->op());

  int parameters = static_cast<int>(
      StateValuesAccess(state->InputAt(kFrameStateParametersInput)).size());
  int locals = static_cast<int>(
      StateValuesAccess(state->InputAt(kFrameStateLocalsInput)).size());
  int stack = static_cast<int>(
      StateValuesAccess(state->InputAt(kFrameStateStackInput)).size());

  DCHECK_EQ(parameters, state_info.parameter_count());
  DCHECK_EQ(locals, state_info.local_count());

  FrameStateDescriptor* outer_state = nullptr;
  Node* outer_node = state->InputAt(kFrameStateOuterStateInput);
  if (outer_node->opcode() == IrOpcode::kFrameState) {
    outer_state = GetFrameStateDescriptor(outer_node);
  }

  return new (instruction_zone()) FrameStateDescriptor(
      instruction_zone(), state_info.type(), state_info.bailout_id(),
      state_info.state_combine(), parameters, locals, stack,
      state_info.shared_info(), outer_state);
}
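// Shuffle canonicalization: shuffle indices 0..15 select lanes of the first
// input and 16..31 lanes of the second. The helpers below detect swizzles
// (only one input actually used), swap the inputs when only the second one is
// referenced, and mask the indices down to 0..15 for swizzles.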
void InstructionSelector::CanonicalizeShuffle(bool inputs_equal,
                                              uint8_t* shuffle,
                                              bool* needs_swap,
                                              bool* is_swizzle) {
  *needs_swap = false;
  if (inputs_equal) {
    // Equal inputs: the shuffle is a swizzle of a single vector.
    *is_swizzle = true;
  } else {
    // Inputs are distinct; check which of them is actually used.
    bool src0_is_used = false;
    bool src1_is_used = false;
    for (int i = 0; i < kSimd128Size; ++i) {
      if (shuffle[i] < kSimd128Size) {
        src0_is_used = true;
      } else {
        src1_is_used = true;
      }
    }
    if (src0_is_used && !src1_is_used) {
      *is_swizzle = true;
    } else if (src1_is_used && !src0_is_used) {
      *needs_swap = true;
      *is_swizzle = true;
    } else {
      *is_swizzle = false;
      // General two-input shuffle: canonicalize so that the first input's
      // lanes are encountered first, swapping the inputs if necessary.
      if (shuffle[0] >= kSimd128Size) {
        *needs_swap = true;
        for (int i = 0; i < kSimd128Size; ++i) {
          shuffle[i] ^= kSimd128Size;
        }
      }
    }
  }
  if (*is_swizzle) {
    // For swizzles, drop the bit that selected the (single) input.
    for (int i = 0; i < kSimd128Size; ++i) shuffle[i] &= kSimd128Size - 1;
  }
}
void InstructionSelector::CanonicalizeShuffle(Node* node, uint8_t* shuffle,
                                              bool* is_swizzle) {
  // Get the raw shuffle indices from the operator.
  memcpy(shuffle, OpParameter<uint8_t*>(node->op()), kSimd128Size);
  bool needs_swap;
  bool inputs_equal = GetVirtualRegister(node->InputAt(0)) ==
                      GetVirtualRegister(node->InputAt(1));
  CanonicalizeShuffle(inputs_equal, shuffle, &needs_swap, is_swizzle);
  if (needs_swap) {
    SwapShuffleInputs(node);
  }
  if (*is_swizzle) {
    // Duplicate the first input so both input slots refer to the same node.
    node->ReplaceInput(1, node->InputAt(0));
  }
}
void InstructionSelector::SwapShuffleInputs(Node* node) {
  Node* input0 = node->InputAt(0);
  Node* input1 = node->InputAt(1);
  node->ReplaceInput(0, input1);
  node->ReplaceInput(1, input0);
}
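// The TryMatch* helpers recognize shuffle patterns that map onto cheaper
// instructions on most targets: the identity shuffle, shuffles that move
// whole 32-bit or 16-bit groups, concatenations (a sliding window over the
// two inputs), and blends that pick each lane from either input without
// reordering.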
bool InstructionSelector::TryMatchIdentity(const uint8_t* shuffle) {
  for (int i = 0; i < kSimd128Size; ++i) {
    if (shuffle[i] != i) return false;
  }
  return true;
}
bool InstructionSelector::TryMatch32x4Shuffle(const uint8_t* shuffle,
                                              uint8_t* shuffle32x4) {
  for (int i = 0; i < 4; ++i) {
    if (shuffle[i * 4] % 4 != 0) return false;
    for (int j = 1; j < 4; ++j) {
      if (shuffle[i * 4 + j] - shuffle[i * 4 + j - 1] != 1) return false;
    }
    shuffle32x4[i] = shuffle[i * 4] / 4;
  }
  return true;
}
bool InstructionSelector::TryMatch16x8Shuffle(const uint8_t* shuffle,
                                              uint8_t* shuffle16x8) {
  for (int i = 0; i < 8; ++i) {
    if (shuffle[i * 2] % 2 != 0) return false;
    for (int j = 1; j < 2; ++j) {
      if (shuffle[i * 2 + j] - shuffle[i * 2 + j - 1] != 1) return false;
    }
    shuffle16x8[i] = shuffle[i * 2] / 2;
  }
  return true;
}
bool InstructionSelector::TryMatchConcat(const uint8_t* shuffle,
                                         uint8_t* offset) {
  // Don't match the identity shuffle (e.g. [0 1 2 ... 15]).
  uint8_t start = shuffle[0];
  if (start == 0) return false;
  DCHECK_GT(kSimd128Size, start);  // The shuffle should be canonicalized.
  // A concatenation is a series of consecutive indices, with at most one jump
  // in the middle from the last lane back to the first.
  for (int i = 1; i < kSimd128Size; ++i) {
    if ((shuffle[i]) != ((shuffle[i - 1] + 1))) {
      if (shuffle[i - 1] != 15) return false;
      if (shuffle[i] % kSimd128Size != 0) return false;
    }
  }
  *offset = start;
  return true;
}
bool InstructionSelector::TryMatchBlend(const uint8_t* shuffle) {
  for (int i = 0; i < 16; ++i) {
    if ((shuffle[i] & 0xF) != i) return false;
  }
  return true;
}
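// Packs four consecutive shuffle bytes into one 32-bit immediate, least
// significant byte first. As an illustrative example (values assumed, not
// taken from the surrounding code): bytes {4, 5, 6, 7} pack to 0x07060504.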
int32_t InstructionSelector::Pack4Lanes(const uint8_t* shuffle) {
  int32_t result = 0;
  for (int i = 3; i >= 0; --i) {
    result <<= 8;
    result |= shuffle[i];
  }
  return result;
}
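// Maps the configured poisoning level to whether a branch or deoptimization
// check carrying the given safety-check kind needs the poisoned (masking)
// form of the continuation.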
bool InstructionSelector::NeedsPoisoning(IsSafetyCheck safety_check) const {
  switch (poisoning_level_) {
    case PoisoningMitigationLevel::kDontPoison:
      return false;
    case PoisoningMitigationLevel::kPoisonAll:
      return safety_check != IsSafetyCheck::kNoSafetyCheck;
    case PoisoningMitigationLevel::kPoisonCriticalOnly:
      return safety_check == IsSafetyCheck::kCriticalSafetyCheck;
  }
  UNREACHABLE();
}