#ifndef V8_COMPILER_BACKEND_INSTRUCTION_SCHEDULER_H_
#define V8_COMPILER_BACKEND_INSTRUCTION_SCHEDULER_H_

#include "src/compiler/backend/instruction.h"
#include "src/zone/zone-containers.h"

namespace v8 {
namespace internal {
namespace compiler {

// A set of flags describing properties of the instructions so that the
// scheduler is aware of dependencies between instructions.
enum ArchOpcodeFlags {
  kNoOpcodeFlags = 0,
  kHasSideEffect = 1,    // The instruction has some side effects (memory
                         // store, function call...).
  kIsLoadOperation = 2,  // The instruction is a memory load.
  kMayNeedDeoptOrTrapCheck = 4,  // The instruction may be associated with a
                                 // deopt or trap check which must be run
                                 // before the instruction, e.g. a div on
                                 // Intel platforms, which raises an exception
                                 // when the divisor is zero.
};
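
// As an illustration only (not part of this header): a back end's
// GetTargetInstructionFlags() implementation, declared below, typically maps
// each arch opcode to a combination of these bits. The opcodes here are
// hypothetical:
//
//   switch (instr->arch_opcode()) {
//     case kMyArchLoadWord:   // hypothetical load opcode
//       return kIsLoadOperation;
//     case kMyArchStoreWord:  // hypothetical store opcode
//       return kHasSideEffect;
//     case kMyArchIdiv:       // hypothetical div trapping on a zero divisor
//       return kMayNeedDeoptOrTrapCheck;
//     default:
//       return kNoOpcodeFlags;
//   }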
class InstructionScheduler final : public ZoneObject {
 public:
  InstructionScheduler(Zone* zone, InstructionSequence* sequence);

  void StartBlock(RpoNumber rpo);
  void EndBlock(RpoNumber rpo);

  void AddInstruction(Instruction* instr);
  void AddTerminator(Instruction* instr);

  static bool SchedulerSupported();

 private:
  // A scheduling graph node.
  // Represents an instruction and its dependencies.
  class ScheduleGraphNode : public ZoneObject {
   public:
    ScheduleGraphNode(Zone* zone, Instruction* instr);

    // Mark the instruction represented by 'node' as a successor of this one:
    // 'node' can only be scheduled after the current instruction.
    void AddSuccessor(ScheduleGraphNode* node);
    // Check whether this instruction depends on other instructions that have
    // not been scheduled yet.
    bool HasUnscheduledPredecessor() {
      return unscheduled_predecessors_count_ != 0;
    }

    // Record that one of the predecessors of this node has been scheduled.
    void DropUnscheduledPredecessor() {
      DCHECK_LT(0, unscheduled_predecessors_count_);
      unscheduled_predecessors_count_--;
    }
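
    // Usage sketch (assumed; see the ScheduleBlock() sketch below): when a
    // predecessor of this node is scheduled, the scheduler calls
    // DropUnscheduledPredecessor() on it, and the node becomes ready for
    // scheduling once HasUnscheduledPredecessor() returns false.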
    Instruction* instruction() { return instr_; }
    ZoneDeque<ScheduleGraphNode*>& successors() { return successors_; }

    // Estimated latency of the instruction, in cycles.
    int latency() const { return latency_; }

    int total_latency() const { return total_latency_; }
    void set_total_latency(int latency) { total_latency_ = latency; }

    int start_cycle() const { return start_cycle_; }
    void set_start_cycle(int start_cycle) { start_cycle_ = start_cycle; }
   private:
    Instruction* instr_;
    ZoneDeque<ScheduleGraphNode*> successors_;

    // Number of unscheduled predecessors for this node.
    int unscheduled_predecessors_count_;

    // Estimate of the instruction latency (the number of cycles it takes for
    // the instruction to complete).
    int latency_;

    // The sum of all the latencies on the path from this node to the end of
    // the graph (i.e. a node with no successor).
    int total_latency_;

    // The scheduler keeps a nominal cycle count to keep track of when the
    // result of an instruction is available. This field is updated by the
    // scheduler to indicate when the value of all the operands of this
    // instruction will be available.
    int start_cycle_;
  };
  // Keep track of all nodes ready to be scheduled (i.e. all of their
  // dependencies have been scheduled). Note that this class is intended to
  // be extended by concrete implementations of the scheduling queue, which
  // define the policy used to pop nodes from the queue.
  class SchedulingQueueBase {
   public:
    explicit SchedulingQueueBase(InstructionScheduler* scheduler)
        : scheduler_(scheduler), nodes_(scheduler->zone()) {}
    void AddNode(ScheduleGraphNode* node);

    bool IsEmpty() const { return nodes_.empty(); }

   protected:
    InstructionScheduler* scheduler_;
    ZoneLinkedList<ScheduleGraphNode*> nodes_;
  };
  // A scheduling queue which prioritizes nodes on the critical path: we look
  // for the instruction with the highest latency on the path to the end of
  // the graph.
  class CriticalPathFirstQueue : public SchedulingQueueBase {
   public:
    explicit CriticalPathFirstQueue(InstructionScheduler* scheduler)
        : SchedulingQueueBase(scheduler) {}

    // Look for the best candidate to schedule, remove it from the queue and
    // return it.
    ScheduleGraphNode* PopBestCandidate(int cycle);
  };
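
  // Assumed selection policy for the queue above (the actual implementation
  // lives in instruction-scheduler.cc): among the ready nodes whose operands
  // are available, i.e. start_cycle() <= cycle, pick the one with the
  // highest total_latency(), so the longest remaining dependency chain is
  // started first.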
  // A queue which pops a random node, used to perform stress tests on the
  // scheduler.
  class StressSchedulerQueue : public SchedulingQueueBase {
   public:
    explicit StressSchedulerQueue(InstructionScheduler* scheduler)
        : SchedulingQueueBase(scheduler) {}

    ScheduleGraphNode* PopBestCandidate(int cycle);

   private:
    Isolate* isolate() { return scheduler_->isolate(); }
  };
  // Perform scheduling for the current block, using the given queue type to
  // select the next best candidate.
  template <typename QueueType>
  void ScheduleBlock();
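
  // Assumed shape of the template body (the real definition is in
  // instruction-scheduler.cc); it is a classic list-scheduling loop:
  //
  //   QueueType ready_list(this);
  //   ComputeTotalLatencies();
  //   // Seed the queue with the nodes that have no dependencies.
  //   for (ScheduleGraphNode* node : graph_) {
  //     if (!node->HasUnscheduledPredecessor()) ready_list.AddNode(node);
  //   }
  //   int cycle = 0;
  //   while (!ready_list.IsEmpty()) {
  //     ScheduleGraphNode* candidate = ready_list.PopBestCandidate(cycle);
  //     if (candidate != nullptr) {
  //       sequence()->AddInstruction(candidate->instruction());
  //       for (ScheduleGraphNode* successor : candidate->successors()) {
  //         successor->DropUnscheduledPredecessor();
  //         successor->set_start_cycle(
  //             std::max(successor->start_cycle(),
  //                      cycle + candidate->latency()));
  //         if (!successor->HasUnscheduledPredecessor()) {
  //           ready_list.AddNode(successor);
  //         }
  //       }
  //     }
  //     cycle++;
  //   }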
  // Return the scheduling properties of the given instruction.
  int GetInstructionFlags(const Instruction* instr) const;
  int GetTargetInstructionFlags(const Instruction* instr) const;
  // Check whether the given instruction has side effects (e.g. function
  // call, memory store).
  bool HasSideEffect(const Instruction* instr) const {
    return (GetInstructionFlags(instr) & kHasSideEffect) != 0;
  }
  // Return true if the instruction is a memory load.
  bool IsLoadOperation(const Instruction* instr) const {
    return (GetInstructionFlags(instr) & kIsLoadOperation) != 0;
  }
  // Return true if the instruction may be associated with a deopt or trap
  // check that must be run before the instruction itself.
  bool MayNeedDeoptOrTrapCheck(const Instruction* instr) const {
    return (GetInstructionFlags(instr) & kMayNeedDeoptOrTrapCheck) != 0;
  }
  // Return true if the instruction cannot be moved before the last deopt or
  // trap point we encountered.
  bool DependsOnDeoptOrTrap(const Instruction* instr) const {
    return MayNeedDeoptOrTrapCheck(instr) || instr->IsDeoptimizeCall() ||
           instr->IsTrap() || HasSideEffect(instr) || IsLoadOperation(instr);
  }
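
  // Sketch of how this predicate is consulted while the dependency graph is
  // built (assumed shape; graph construction lives in
  // instruction-scheduler.cc):
  //
  //   if (last_deopt_or_trap_ != nullptr && DependsOnDeoptOrTrap(instr)) {
  //     last_deopt_or_trap_->AddSuccessor(node);  // keep 'instr' after it
  //   }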
  // Identify nops used as a definition point for live-in registers at
  // function entry.
  bool IsFixedRegisterParameter(const Instruction* instr) const {
    return (instr->arch_opcode() == kArchNop) && (instr->OutputCount() == 1) &&
           (instr->OutputAt(0)->IsUnallocated()) &&
           (UnallocatedOperand::cast(instr->OutputAt(0))
                ->HasFixedRegisterPolicy() ||
            UnallocatedOperand::cast(instr->OutputAt(0))
                ->HasFixedFPRegisterPolicy());
  }
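
  // For instance (illustrative only): at function entry the code generator
  // emits nops whose single unallocated output carries a fixed register (or
  // fixed FP register) policy, tying an incoming parameter to the register
  // the calling convention places it in; this predicate recognizes exactly
  // those marker nops.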
  // Compute the total latency of each node in the graph.
  void ComputeTotalLatencies();
  // Estimate the number of cycles the given instruction takes to complete.
  static int GetInstructionLatency(const Instruction* instr);
  Zone* zone() { return zone_; }
  InstructionSequence* sequence() { return sequence_; }
  Isolate* isolate() { return sequence()->isolate(); }

  Zone* zone_;
  InstructionSequence* sequence_;
  ZoneVector<ScheduleGraphNode*> graph_;

  friend class InstructionSchedulerTester;
  // Last side effect instruction encountered while building the graph.
  ScheduleGraphNode* last_side_effect_instr_;

  // Set of load instructions encountered since the last side effect
  // instruction, to be added as predecessors of the next instruction with
  // side effects.
  ZoneVector<ScheduleGraphNode*> pending_loads_;
  // Live-in register markers are nop instructions which are emitted at the
  // beginning of a basic block so that the register allocator will find a
  // defining instruction for live-in values. They must not be moved. All
  // these nops are chained together and added as a predecessor of every
  // other instruction in the basic block.
  ScheduleGraphNode* last_live_in_reg_marker_;
  // Last deoptimization or trap instruction encountered while building the
  // graph.
  ScheduleGraphNode* last_deopt_or_trap_;

  // Keep track of the definition points for virtual registers, used to
  // record operand dependencies in the graph.
  ZoneMap<int32_t, ScheduleGraphNode*> operands_map_;
};
}  // namespace compiler
}  // namespace internal
}  // namespace v8

#endif  // V8_COMPILER_BACKEND_INSTRUCTION_SCHEDULER_H_