#include "src/compiler/pipeline.h"

#include "src/assembler-inl.h"
#include "src/base/adapters.h"
#include "src/base/optional.h"
#include "src/base/platform/elapsed-timer.h"
#include "src/bootstrapper.h"
#include "src/code-tracer.h"
#include "src/compiler.h"
#include "src/compiler/backend/code-generator.h"
#include "src/compiler/backend/frame-elider.h"
#include "src/compiler/backend/instruction-selector.h"
#include "src/compiler/backend/instruction.h"
#include "src/compiler/backend/jump-threading.h"
#include "src/compiler/backend/live-range-separator.h"
#include "src/compiler/backend/move-optimizer.h"
#include "src/compiler/backend/register-allocator-verifier.h"
#include "src/compiler/backend/register-allocator.h"
#include "src/compiler/basic-block-instrumentor.h"
#include "src/compiler/branch-elimination.h"
#include "src/compiler/bytecode-graph-builder.h"
#include "src/compiler/checkpoint-elimination.h"
#include "src/compiler/common-operator-reducer.h"
#include "src/compiler/compilation-dependencies.h"
#include "src/compiler/compiler-source-position-table.h"
#include "src/compiler/constant-folding-reducer.h"
#include "src/compiler/control-flow-optimizer.h"
#include "src/compiler/dead-code-elimination.h"
#include "src/compiler/effect-control-linearizer.h"
#include "src/compiler/escape-analysis-reducer.h"
#include "src/compiler/escape-analysis.h"
#include "src/compiler/graph-trimmer.h"
#include "src/compiler/graph-visualizer.h"
#include "src/compiler/js-call-reducer.h"
#include "src/compiler/js-context-specialization.h"
#include "src/compiler/js-create-lowering.h"
#include "src/compiler/js-generic-lowering.h"
#include "src/compiler/js-heap-broker.h"
#include "src/compiler/js-heap-copy-reducer.h"
#include "src/compiler/js-inlining-heuristic.h"
#include "src/compiler/js-intrinsic-lowering.h"
#include "src/compiler/js-native-context-specialization.h"
#include "src/compiler/js-typed-lowering.h"
#include "src/compiler/load-elimination.h"
#include "src/compiler/loop-analysis.h"
#include "src/compiler/loop-peeling.h"
#include "src/compiler/loop-variable-optimizer.h"
#include "src/compiler/machine-graph-verifier.h"
#include "src/compiler/machine-operator-reducer.h"
#include "src/compiler/memory-optimizer.h"
#include "src/compiler/node-origin-table.h"
#include "src/compiler/osr.h"
#include "src/compiler/pipeline-statistics.h"
#include "src/compiler/redundancy-elimination.h"
#include "src/compiler/schedule.h"
#include "src/compiler/scheduler.h"
#include "src/compiler/select-lowering.h"
#include "src/compiler/simplified-lowering.h"
#include "src/compiler/simplified-operator-reducer.h"
#include "src/compiler/simplified-operator.h"
#include "src/compiler/store-store-elimination.h"
#include "src/compiler/type-narrowing-reducer.h"
#include "src/compiler/typed-optimization.h"
#include "src/compiler/typer.h"
#include "src/compiler/value-numbering-reducer.h"
#include "src/compiler/verifier.h"
#include "src/compiler/wasm-compiler.h"
#include "src/compiler/zone-stats.h"
#include "src/disassembler.h"
#include "src/isolate-inl.h"
#include "src/objects/shared-function-info.h"
#include "src/optimized-compilation-info.h"
#include "src/ostreams.h"
#include "src/parsing/parse-info.h"
#include "src/register-configuration.h"
#include "src/utils.h"
#include "src/wasm/function-body-decoder.h"
#include "src/wasm/wasm-engine.h"

const int kMaxBytecodeSizeForTurbofan = 128 * 1024;
      allocator_(isolate->allocator()),
      debug_name_(info_->GetDebugName()),
      may_have_unverifiable_graph_(false),
      zone_stats_(zone_stats),
      pipeline_statistics_(pipeline_statistics),
      graph_zone_scope_(zone_stats_, ZONE_NAME),
      graph_zone_(graph_zone_scope_.zone()),
      instruction_zone_scope_(zone_stats_, ZONE_NAME),
      instruction_zone_(instruction_zone_scope_.zone()),
      codegen_zone_scope_(zone_stats_, ZONE_NAME),
      codegen_zone_(codegen_zone_scope_.zone()),
      register_allocation_zone_scope_(zone_stats_, ZONE_NAME),
      register_allocation_zone_(register_allocation_zone_scope_.zone()),
      assembler_options_(AssemblerOptions::Default(isolate)) {
    PhaseScope scope(pipeline_statistics, "init pipeline data");
    graph_ = new (graph_zone_) Graph(graph_zone_);
    node_origins_ = info->trace_turbo_json_enabled()
                        ? new (graph_zone_) NodeOriginTable(graph_)
                        : nullptr;
    machine_ = new (graph_zone_) MachineOperatorBuilder(
        graph_zone_, MachineType::PointerRepresentation(),
        InstructionSelector::SupportedMachineOperatorFlags(),
        InstructionSelector::AlignmentRequirements());
    jsgraph_ = new (graph_zone_)
        JSGraph(isolate_, graph_, common_, javascript_, simplified_, machine_);
    broker_ = new (info_->zone()) JSHeapBroker(isolate_, info_->zone());
  }
      wasm_engine_(wasm_engine),
      allocator_(wasm_engine->allocator()),
      debug_name_(info_->GetDebugName()),
      may_have_unverifiable_graph_(false),
      zone_stats_(zone_stats),
      pipeline_statistics_(pipeline_statistics),
      graph_zone_scope_(zone_stats_, ZONE_NAME),
      graph_zone_(graph_zone_scope_.zone()),
      graph_(mcgraph->graph()),
      source_positions_(source_positions),
      node_origins_(node_origins),
      machine_(mcgraph->machine()),
      common_(mcgraph->common()),
      instruction_zone_scope_(zone_stats_, ZONE_NAME),
      instruction_zone_(instruction_zone_scope_.zone()),
      codegen_zone_scope_(zone_stats_, ZONE_NAME),
      codegen_zone_(codegen_zone_scope_.zone()),
      register_allocation_zone_scope_(zone_stats_, ZONE_NAME),
      register_allocation_zone_(register_allocation_zone_scope_.zone()),
      assembler_options_(assembler_options) {}
      allocator_(isolate->allocator()),
      debug_name_(info_->GetDebugName()),
      zone_stats_(zone_stats),
      graph_zone_scope_(zone_stats_, ZONE_NAME),
      source_positions_(source_positions),
      node_origins_(node_origins),
      instruction_zone_scope_(zone_stats_, ZONE_NAME),
      instruction_zone_(instruction_zone_scope_.zone()),
      codegen_zone_scope_(zone_stats_, ZONE_NAME),
      codegen_zone_(codegen_zone_scope_.zone()),
      register_allocation_zone_scope_(zone_stats_, ZONE_NAME),
      register_allocation_zone_(register_allocation_zone_scope_.zone()),
      jump_optimization_info_(jump_opt),
      assembler_options_(assembler_options) {}
      allocator_(isolate->allocator()),
      debug_name_(info_->GetDebugName()),
      zone_stats_(zone_stats),
      graph_zone_scope_(zone_stats_, ZONE_NAME),
      instruction_zone_scope_(zone_stats_, ZONE_NAME),
      instruction_zone_(sequence->zone()),
      codegen_zone_scope_(zone_stats_, ZONE_NAME),
      codegen_zone_(codegen_zone_scope_.zone()),
      register_allocation_zone_scope_(zone_stats_, ZONE_NAME),
      register_allocation_zone_(register_allocation_zone_scope_.zone()),
      assembler_options_(AssemblerOptions::Default(isolate)) {}
  ~PipelineData() {
    delete code_generator_;  // Must happen before zones are destroyed.
    code_generator_ = nullptr;
    DeleteRegisterAllocationZone();
    DeleteInstructionZone();
  }
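  // The accessors that follow hand out the per-compilation state (zones,
  // graph, schedule, instruction sequence, frame, register allocation data)
  // owned by PipelineData.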
  Isolate* isolate() const { return isolate_; }
  ZoneStats* zone_stats() const { return zone_stats_; }
  OsrHelper* osr_helper() { return &(*osr_helper_); }
  bool compilation_failed() const { return compilation_failed_; }
  void set_compilation_failed() { compilation_failed_ = true; }

  bool verify_graph() const { return verify_graph_; }
  void set_verify_graph(bool value) { verify_graph_ = value; }
  void set_code(MaybeHandle<Code> code) {
    DCHECK(code_.is_null());
    code_ = code;
  }
  CodeGenerator* code_generator() const { return code_generator_; }

  bool MayHaveUnverifiableGraph() const { return may_have_unverifiable_graph_; }

  Zone* graph_zone() const { return graph_zone_; }
  Graph* graph() const { return graph_; }
  JSGraph* jsgraph() const { return jsgraph_; }

  Handle<Context> native_context() const {
    return handle(info()->native_context(), isolate());
  }
  Handle<JSGlobalObject> global_object() const {
    return handle(info()->global_object(), isolate());
  }

  Schedule* schedule() const { return schedule_; }
  void set_schedule(Schedule* schedule) {
    schedule_ = schedule;
  }
  void reset_schedule() { schedule_ = nullptr; }

  Zone* instruction_zone() const { return instruction_zone_; }
  Zone* codegen_zone() const { return codegen_zone_; }
  Frame* frame() const { return frame_; }

  Zone* register_allocation_zone() const { return register_allocation_zone_; }
  RegisterAllocationData* register_allocation_data() const {
    return register_allocation_data_;
  }

  void set_profiler_data(BasicBlockProfiler::Data* profiler_data) {
    profiler_data_ = profiler_data;
  }

  std::string const& source_position_output() const {
    return source_position_output_;
  }
  void set_source_position_output(std::string const& source_position_output) {
    source_position_output_ = source_position_output;
  }

  JumpOptimizationInfo* jump_optimization_info() const {
    return jump_optimization_info_;
  }

  const AssemblerOptions& assembler_options() const {
    return assembler_options_;
  }

  CodeTracer* GetCodeTracer() const {
    return wasm_engine_ == nullptr ? isolate_->GetCodeTracer()
                                   : wasm_engine_->GetCodeTracer();
  }
  Typer* CreateTyper() {
    typer_ = new Typer(broker(), typer_flags_, graph());
    return typer_;
  }

  void AddTyperFlag(Typer::Flag flag) {
    typer_flags_ |= flag;
  }
  void DeleteGraphZone() {
    if (graph_zone_ == nullptr) return;
    graph_zone_scope_.Destroy();
    graph_zone_ = nullptr;
    source_positions_ = nullptr;
    node_origins_ = nullptr;
    simplified_ = nullptr;
    javascript_ = nullptr;
  }

  void DeleteInstructionZone() {
    if (instruction_zone_ == nullptr) return;
    instruction_zone_scope_.Destroy();
    instruction_zone_ = nullptr;
  }

  void DeleteCodegenZone() {
    if (codegen_zone_ == nullptr) return;
    codegen_zone_scope_.Destroy();
    codegen_zone_ = nullptr;
    dependencies_ = nullptr;
  }

  void DeleteRegisterAllocationZone() {
    if (register_allocation_zone_ == nullptr) return;
    register_allocation_zone_scope_.Destroy();
    register_allocation_zone_ = nullptr;
    register_allocation_data_ = nullptr;
  }
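  // The helpers above tear down each zone once the pipeline is done with it
  // and null out every pointer into that zone, so later phases cannot
  // accidentally touch freed memory.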
  void InitializeInstructionSequence(const CallDescriptor* call_descriptor) {
    DCHECK_NULL(sequence_);
    InstructionBlocks* instruction_blocks =
        InstructionSequence::InstructionBlocksFor(instruction_zone(),
                                                  schedule());
    sequence_ = new (instruction_zone())
        InstructionSequence(isolate(), instruction_zone(), instruction_blocks);
    if (call_descriptor && call_descriptor->RequiresFrameAsIncoming()) {
      sequence_->instruction_blocks()[0]->mark_needs_frame();
    } else {
      DCHECK_EQ(0u, call_descriptor->CalleeSavedFPRegisters());
      DCHECK_EQ(0u, call_descriptor->CalleeSavedRegisters());
    }
  }

  void InitializeFrameData(CallDescriptor* call_descriptor) {
    int fixed_frame_size = 0;
    if (call_descriptor != nullptr) {
      fixed_frame_size = call_descriptor->CalculateFixedFrameSize();
    }
    frame_ = new (codegen_zone()) Frame(fixed_frame_size);
  }

  void InitializeRegisterAllocationData(const RegisterConfiguration* config,
                                        CallDescriptor* call_descriptor) {
    DCHECK_NULL(register_allocation_data_);
    register_allocation_data_ = new (register_allocation_zone())
        RegisterAllocationData(config, register_allocation_zone(), frame(),
                               sequence(), debug_name());
  }
  void InitializeOsrHelper() {
    DCHECK(!osr_helper_.has_value());
    osr_helper_.emplace(info());
  }

  void set_start_source_position(int position) {
    DCHECK_EQ(start_source_position_, kNoSourcePosition);
    start_source_position_ = position;
  }

  void InitializeCodeGenerator(Linkage* linkage) {
    DCHECK_NULL(code_generator_);
    code_generator_ = new CodeGenerator(
        codegen_zone(), frame(), linkage, sequence(), info(), isolate(),
        osr_helper_, start_source_position_, jump_optimization_info_,
        info()->GetPoisoningMitigationLevel(), assembler_options_,
        info_->builtin_index());
  }
  void BeginPhaseKind(const char* phase_kind_name) {
    if (pipeline_statistics() != nullptr) {
      pipeline_statistics()->BeginPhaseKind(phase_kind_name);
    }
  }

  void EndPhaseKind() {
    if (pipeline_statistics() != nullptr) {
      pipeline_statistics()->EndPhaseKind();
    }
  }

  const char* debug_name() const { return debug_name_.get(); }
  std::unique_ptr<char[]> debug_name_;
  bool may_have_unverifiable_graph_ = true;
  bool compilation_failed_ = false;
  bool verify_graph_ = false;
  int start_source_position_ = kNoSourcePosition;
  Typer* typer_ = nullptr;

  Zone* graph_zone_ = nullptr;
  Graph* graph_ = nullptr;

  Zone* instruction_zone_;
  Frame* frame_ = nullptr;

  Zone* register_allocation_zone_;

  std::string source_position_output_;
  template <typename Phase>
  void Run();
  template <typename Phase, typename Arg0>
  void Run(Arg0 arg_0);
  template <typename Phase, typename Arg0, typename Arg1>
  void Run(Arg0 arg_0, Arg1 arg_1);

  bool OptimizeGraph(Linkage* linkage);

  void ComputeScheduledGraph();

  bool SelectInstructions(Linkage* linkage);

  void AssembleCode(Linkage* linkage);

  void VerifyGeneratedCodeIsIdempotent();
  void RunPrintAndVerify(const char* phase, bool untyped = false);
void PrintFunctionSource(OptimizedCompilationInfo* info, Isolate* isolate,
                         int source_id, Handle<SharedFunctionInfo> shared) {
  if (!shared->script()->IsUndefined(isolate)) {
    Handle<Script> script(Script::cast(shared->script()), isolate);

    if (!script->source()->IsUndefined(isolate)) {
      CodeTracer::Scope tracing_scope(isolate->GetCodeTracer());
      Object* source_name = script->name();
      OFStream os(tracing_scope.file());
      os << "--- FUNCTION SOURCE (";
      if (source_name->IsString()) {
        os << String::cast(source_name)->ToCString().get() << ":";
      }
      os << shared->DebugName()->ToCString().get() << ") id{";
      os << info->optimization_id() << "," << source_id << "} start{";
      os << shared->StartPosition() << "} ---\n";
      int start = shared->StartPosition();
      int len = shared->EndPosition() - start;
      SubStringRange source(String::cast(script->source()), start, len);
      for (const auto& c : source) {
        os << AsReversiblyEscapedUC16(c);
      }
      os << "\n--- END ---\n";
    }
  }
}
void PrintInlinedFunctionInfo(
    OptimizedCompilationInfo* info, Isolate* isolate, int source_id,
    int inlining_id, const OptimizedCompilationInfo::InlinedFunctionHolder& h) {
  CodeTracer::Scope tracing_scope(isolate->GetCodeTracer());
  OFStream os(tracing_scope.file());
  os << "INLINE (" << h.shared_info->DebugName()->ToCString().get() << ") id{"
     << info->optimization_id() << "," << source_id << "} AS " << inlining_id
     << " AT ";
  const SourcePosition position = h.position.position;
  if (position.IsKnown()) {
    os << "<" << position.InliningId() << ":" << position.ScriptOffset() << ">";
  }
  os << std::endl;
}
void PrintParticipatingSource(OptimizedCompilationInfo* info,
                              Isolate* isolate) {
  AllowDeferredHandleDereference allow_deference_for_print_code;

  SourceIdAssigner id_assigner(info->inlined_functions().size());
  PrintFunctionSource(info, isolate, -1, info->shared_info());
  const auto& inlined = info->inlined_functions();
  for (unsigned id = 0; id < inlined.size(); id++) {
    const int source_id = id_assigner.GetIdFor(inlined[id].shared_info);
    PrintFunctionSource(info, isolate, source_id, inlined[id].shared_info);
    PrintInlinedFunctionInfo(info, isolate, source_id, id, inlined[id]);
  }
}
void PrintCode(Isolate* isolate, Handle<Code> code,
               OptimizedCompilationInfo* info) {
  if (FLAG_print_opt_source && info->IsOptimizing()) {
    PrintParticipatingSource(info, isolate);
  }

#ifdef ENABLE_DISASSEMBLER
  AllowDeferredHandleDereference allow_deference_for_print_code;
  bool print_code =
      FLAG_print_code || (info->IsStub() && FLAG_print_code_stubs) ||
      (info->IsOptimizing() && FLAG_print_opt_code &&
       info->shared_info()->PassesFilter(FLAG_print_opt_code_filter));
  if (print_code) {
    std::unique_ptr<char[]> debug_name = info->GetDebugName();
    CodeTracer::Scope tracing_scope(isolate->GetCodeTracer());
    OFStream os(tracing_scope.file());

    bool print_source = code->kind() == Code::OPTIMIZED_FUNCTION;
    if (print_source) {
      Handle<SharedFunctionInfo> shared = info->shared_info();
      if (shared->script()->IsScript() &&
          !Script::cast(shared->script())->source()->IsUndefined(isolate)) {
        os << "--- Raw source ---\n";
        StringCharacterStream stream(
            String::cast(Script::cast(shared->script())->source()),
            shared->StartPosition());
        int source_len = shared->EndPosition() - shared->StartPosition() + 1;
        for (int i = 0; i < source_len; i++) {
          if (stream.HasMore()) {
            os << AsReversiblyEscapedUC16(stream.GetNext());
          }
        }
      }
    }
    if (info->IsOptimizing()) {
      os << "--- Optimized code ---\n"
         << "optimization_id = " << info->optimization_id() << "\n";
    } else {
      os << "--- Code ---\n";
    }
    if (print_source) {
      Handle<SharedFunctionInfo> shared = info->shared_info();
      os << "source_position = " << shared->StartPosition() << "\n";
    }
    code->Disassemble(debug_name.get(), os);
    os << "--- End code ---\n";
  }
#endif  // ENABLE_DISASSEMBLER
}

void TraceSchedule(OptimizedCompilationInfo* info, PipelineData* data,
                   Schedule* schedule, const char* phase_name) {
  if (info->trace_turbo_json_enabled()) {
    AllowHandleDereference allow_deref;
    TurboJsonFile json_of(info, std::ios_base::app);
    json_of << "{\"name\":\"" << phase_name << "\",\"type\":\"schedule\""
            << ",\"data\":\"";
    std::stringstream schedule_stream;
    schedule_stream << *schedule;
    std::string schedule_string(schedule_stream.str());
    for (const auto& c : schedule_string) {
      json_of << AsEscapedUC16ForJSON(c);
    }
    json_of << "\"},\n";
  }
  if (info->trace_turbo_graph_enabled() || FLAG_trace_turbo_scheduler) {
    AllowHandleDereference allow_deref;
    CodeTracer::Scope tracing_scope(data->GetCodeTracer());
    OFStream os(tracing_scope.file());
    os << "-- Schedule --------------------------------------\n" << *schedule;
  }
}
class SourcePositionWrapper final : public Reducer {
 public:
  SourcePositionWrapper(Reducer* reducer, SourcePositionTable* table)
      : reducer_(reducer), table_(table) {}
  ~SourcePositionWrapper() final = default;

  const char* reducer_name() const override { return reducer_->reducer_name(); }

  Reduction Reduce(Node* node) final {
    SourcePosition const pos = table_->GetSourcePosition(node);
    SourcePositionTable::Scope position(table_, pos);
    return reducer_->Reduce(node);
  }

  void Finalize() final { reducer_->Finalize(); }

 private:
  Reducer* const reducer_;
  SourcePositionTable* const table_;

  DISALLOW_COPY_AND_ASSIGN(SourcePositionWrapper);
};
class NodeOriginsWrapper final : public Reducer {
 public:
  NodeOriginsWrapper(Reducer* reducer, NodeOriginTable* table)
      : reducer_(reducer), table_(table) {}
  ~NodeOriginsWrapper() final = default;

  const char* reducer_name() const override { return reducer_->reducer_name(); }

  Reduction Reduce(Node* node) final {
    NodeOriginTable::Scope position(table_, reducer_name(), node);
    return reducer_->Reduce(node);
  }

  void Finalize() final { reducer_->Finalize(); }

 private:
  Reducer* const reducer_;
  NodeOriginTable* const table_;

  DISALLOW_COPY_AND_ASSIGN(NodeOriginsWrapper);
};
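// Both wrappers above decorate another Reducer: SourcePositionWrapper opens a
// SourcePositionTable::Scope and NodeOriginsWrapper a NodeOriginTable::Scope
// around the wrapped Reduce() call, so nodes created during that reduction are
// attributed to the node being replaced.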
void AddReducer(PipelineData* data, GraphReducer* graph_reducer,
                Reducer* reducer) {
  if (data->info()->is_source_positions_enabled()) {
    void* const buffer = data->graph_zone()->New(sizeof(SourcePositionWrapper));
    SourcePositionWrapper* const wrapper =
        new (buffer) SourcePositionWrapper(reducer, data->source_positions());
    reducer = wrapper;
  }
  if (data->info()->trace_turbo_json_enabled()) {
    void* const buffer = data->graph_zone()->New(sizeof(NodeOriginsWrapper));
    NodeOriginsWrapper* const wrapper =
        new (buffer) NodeOriginsWrapper(reducer, data->node_origins());
    reducer = wrapper;
  }
  graph_reducer->AddReducer(reducer);
}
class PipelineRunScope {
 public:
  PipelineRunScope(PipelineData* data, const char* phase_name)
      : phase_scope_(
            phase_name == nullptr ? nullptr : data->pipeline_statistics(),
            phase_name),
        zone_scope_(data->zone_stats(), ZONE_NAME),
        origin_scope_(data->node_origins(), phase_name) {}

  Zone* zone() { return zone_scope_.zone(); }

 private:
  PhaseScope phase_scope_;
  ZoneStats::Scope zone_scope_;
  NodeOriginTable::PhaseScope origin_scope_;
};
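// PipelineRunScope is what Run<Phase>() opens around every phase: a PhaseScope
// for the statistics (when a phase name is given), a temporary
// ZoneStats::Scope whose zone the phase may allocate in, and a
// NodeOriginTable::PhaseScope for origin tracking.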
PipelineStatistics* CreatePipelineStatistics(Handle<Script> script,
                                             OptimizedCompilationInfo* info,
                                             Isolate* isolate,
                                             ZoneStats* zone_stats) {
  PipelineStatistics* pipeline_statistics = nullptr;

  if (FLAG_turbo_stats || FLAG_turbo_stats_nvp) {
    pipeline_statistics =
        new PipelineStatistics(info, isolate->GetTurboStatistics(), zone_stats);
    pipeline_statistics->BeginPhaseKind("initializing");
  }

  if (info->trace_turbo_json_enabled()) {
    TurboJsonFile json_of(info, std::ios_base::trunc);
    json_of << "{\"function\" : ";
    JsonPrintFunctionSource(json_of, -1, info->GetDebugName(), script, isolate,
                            info->shared_info());
    json_of << ",\n\"phases\":[";
  }

  return pipeline_statistics;
}
PipelineStatistics* CreatePipelineStatistics(
    wasm::WasmEngine* wasm_engine, wasm::FunctionBody function_body,
    const wasm::WasmModule* wasm_module, OptimizedCompilationInfo* info,
    ZoneStats* zone_stats) {
  PipelineStatistics* pipeline_statistics = nullptr;

  if (FLAG_turbo_stats_wasm) {
    pipeline_statistics = new PipelineStatistics(
        info, wasm_engine->GetOrCreateTurboStatistics(), zone_stats);
    pipeline_statistics->BeginPhaseKind("initializing");
  }

  if (info->trace_turbo_json_enabled()) {
    TurboJsonFile json_of(info, std::ios_base::trunc);
    std::unique_ptr<char[]> function_name = info->GetDebugName();
    json_of << "{\"function\":\"" << function_name.get() << "\", \"source\":\"";
    AccountingAllocator allocator;
    std::ostringstream disassembly;
    std::vector<int> source_positions;
    wasm::PrintRawWasmCode(&allocator, function_body, wasm_module,
                           wasm::kPrintLocals, disassembly, &source_positions);
    for (const auto& c : disassembly.str()) {
      json_of << AsEscapedUC16ForJSON(c);
    }
    json_of << "\",\n\"sourceLineToBytecodePosition\" : [";
    bool insert_comma = false;
    for (auto val : source_positions) {
      if (insert_comma) {
        json_of << ", ";
      }
      json_of << val;
      insert_comma = true;
    }
    json_of << "],\n\"phases\":[";
  }

  return pipeline_statistics;
}
      : OptimizedCompilationJob(
            function->GetIsolate()->stack_guard()->real_climit(),
            &compilation_info_, "TurboFan"),
        zone_(function->GetIsolate()->allocator(), ZONE_NAME),
        zone_stats_(function->GetIsolate()->allocator()),
        compilation_info_(&zone_, function->GetIsolate(), shared_info,
                          function),
        pipeline_statistics_(CreatePipelineStatistics(
            handle(Script::cast(shared_info->script()), isolate),
            compilation_info(), function->GetIsolate(), &zone_stats_)),
        data_(&zone_stats_, function->GetIsolate(), compilation_info(),
              pipeline_statistics_.get()),
        pipeline_(&data_) {}

 protected:
  Status PrepareJobImpl(Isolate* isolate) final;
  Status ExecuteJobImpl() final;
  Status FinalizeJobImpl(Isolate* isolate) final;

 private:
  std::unique_ptr<PipelineStatistics> pipeline_statistics_;
PipelineCompilationJob::Status PipelineCompilationJob::PrepareJobImpl(
    Isolate* isolate) {
  if (compilation_info()->bytecode_array()->length() >
      kMaxBytecodeSizeForTurbofan) {
    return AbortOptimization(BailoutReason::kFunctionTooBig);
  }

  if (!FLAG_always_opt) {
    compilation_info()->MarkAsBailoutOnUninitialized();
  }
  if (FLAG_turbo_loop_peeling) {
    compilation_info()->MarkAsLoopPeelingEnabled();
  }
  if (FLAG_turbo_inlining) {
    compilation_info()->MarkAsInliningEnabled();
  }
  if (FLAG_inline_accessors) {
    compilation_info()->MarkAsAccessorInliningEnabled();
  }

  PoisoningMitigationLevel load_poisoning =
      PoisoningMitigationLevel::kDontPoison;
  if (FLAG_untrusted_code_mitigations) {
    load_poisoning = PoisoningMitigationLevel::kPoisonCriticalOnly;
  }
  compilation_info()->SetPoisoningMitigationLevel(load_poisoning);

  if (FLAG_turbo_allocation_folding) {
    compilation_info()->MarkAsAllocationFoldingEnabled();
  }

  if (compilation_info()->closure()->feedback_cell()->map() ==
      ReadOnlyRoots(isolate).one_closure_cell_map()) {
    compilation_info()->MarkAsFunctionContextSpecializing();
  }

  data_.set_start_source_position(
      compilation_info()->shared_info()->StartPosition());

  linkage_ = new (compilation_info()->zone()) Linkage(
      Linkage::ComputeIncoming(compilation_info()->zone(), compilation_info()));

  if (!pipeline_.CreateGraph()) {
    if (isolate->has_pending_exception()) return FAILED;  // Stack overflowed.
    return AbortOptimization(BailoutReason::kGraphBuildingFailed);
  }

  if (compilation_info()->is_osr()) data_.InitializeOsrHelper();

  // Make sure the deopt entry code is generated before assembly starts.
  Deoptimizer::EnsureCodeForMaxDeoptimizationEntries(isolate);

  return SUCCEEDED;
}
PipelineCompilationJob::Status PipelineCompilationJob::ExecuteJobImpl() {
  if (!pipeline_.OptimizeGraph(linkage_)) return FAILED;
  pipeline_.AssembleCode(linkage_);
  return SUCCEEDED;
}
PipelineCompilationJob::Status PipelineCompilationJob::FinalizeJobImpl(
    Isolate* isolate) {
  MaybeHandle<Code> maybe_code = pipeline_.FinalizeCode();
  Handle<Code> code;
  if (!maybe_code.ToHandle(&code)) {
    if (compilation_info()->bailout_reason() == BailoutReason::kNoReason) {
      return AbortOptimization(BailoutReason::kCodeGenerationFailed);
    }
    return FAILED;
  }
  if (!pipeline_.CommitDependencies(code)) {
    return RetryOptimization(BailoutReason::kBailedOutDueToDependencyChange);
  }

  compilation_info()->SetCode(code);
  compilation_info()->native_context()->AddOptimizedCode(*code);
  RegisterWeakObjectsInOptimizedCode(code, isolate);
  return SUCCEEDED;
}
void PipelineCompilationJob::RegisterWeakObjectsInOptimizedCode(
    Handle<Code> code, Isolate* isolate) {
  DCHECK(code->is_optimized_code());
  std::vector<Handle<Map>> maps;
  {
    DisallowHeapAllocation no_gc;
    int const mode_mask = RelocInfo::ModeMask(RelocInfo::EMBEDDED_OBJECT);
    for (RelocIterator it(*code, mode_mask); !it.done(); it.next()) {
      RelocInfo::Mode mode = it.rinfo()->rmode();
      if (mode == RelocInfo::EMBEDDED_OBJECT &&
          code->IsWeakObjectInOptimizedCode(it.rinfo()->target_object())) {
        Handle<HeapObject> object(HeapObject::cast(it.rinfo()->target_object()),
                                  isolate);
        if (object->IsMap()) {
          maps.push_back(Handle<Map>::cast(object));
        }
      }
    }
  }
  for (Handle<Map> map : maps) {
    isolate->heap()->AddRetainedMap(map);
  }
  code->set_can_have_weak_objects(true);
}
template <typename Phase>
void PipelineImpl::Run() {
  PipelineRunScope scope(this->data_, Phase::phase_name());
  Phase phase;
  phase.Run(this->data_, scope.zone());
}

template <typename Phase, typename Arg0>
void PipelineImpl::Run(Arg0 arg_0) {
  PipelineRunScope scope(this->data_, Phase::phase_name());
  Phase phase;
  phase.Run(this->data_, scope.zone(), arg_0);
}

template <typename Phase, typename Arg0, typename Arg1>
void PipelineImpl::Run(Arg0 arg_0, Arg1 arg_1) {
  PipelineRunScope scope(this->data_, Phase::phase_name());
  Phase phase;
  phase.Run(this->data_, scope.zone(), arg_0, arg_1);
}
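// Each phase below is a struct with a static phase_name() and a Run() method
// that receives the PipelineData plus the temporary zone provided by the
// PipelineRunScope; PipelineImpl::Run<Phase>(args...) above forwards any
// extra arguments to that Run() method.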
  static const char* phase_name() { return "bytecode graph builder"; }

    if (data->info()->is_bailout_on_uninitialized()) {
      flags |= JSTypeHintLowering::kBailoutOnUninitialized;
    }
    BytecodeGraphBuilder graph_builder(
        temp_zone, data->info()->bytecode_array(), data->info()->shared_info(),
        handle(data->info()->closure()->feedback_vector(), data->isolate()),
        data->info()->osr_offset(), data->jsgraph(), frequency,
        data->source_positions(), data->native_context(),
        SourcePosition::kNotInlined, flags, true,
        data->info()->is_analyze_environment_liveness());
    graph_builder.CreateGraph();
Maybe<OuterContext> GetModuleContext(Handle<JSFunction> closure) {
  Context current = closure->context();
  size_t distance = 0;
  while (!current->IsNativeContext()) {
    if (current->IsModuleContext()) {
      return Just(
          OuterContext(handle(current, current->GetIsolate()), distance));
    }
    current = current->previous();
    distance++;
  }
  return Nothing<OuterContext>();
}

Maybe<OuterContext> ChooseSpecializationContext(
    Isolate* isolate, OptimizedCompilationInfo* info) {
  if (info->is_function_context_specializing()) {
    DCHECK(info->has_context());
    return Just(OuterContext(handle(info->context(), isolate), 0));
  }
  return GetModuleContext(info->closure());
}
  static const char* phase_name() { return "inlining"; }

    Isolate* isolate = data->isolate();
    GraphReducer graph_reducer(temp_zone, data->graph(),
                               data->jsgraph()->Dead());
    DeadCodeElimination dead_code_elimination(&graph_reducer, data->graph(),
                                              data->common(), temp_zone);
    CommonOperatorReducer common_reducer(&graph_reducer, data->graph(),
                                         data->broker(), data->common(),
                                         data->machine(), temp_zone);
    JSCallReducer call_reducer(&graph_reducer, data->jsgraph(), data->broker(),
                               data->info()->is_bailout_on_uninitialized()
                                   ? JSCallReducer::kBailoutOnUninitialized
                                   : JSCallReducer::kNoFlags,
                               data->dependencies());
    JSContextSpecialization context_specialization(
        &graph_reducer, data->jsgraph(), data->broker(),
        ChooseSpecializationContext(isolate, data->info()),
        data->info()->is_function_context_specializing()
            ? data->info()->closure()
            : MaybeHandle<JSFunction>());
    JSNativeContextSpecialization::Flags flags =
        JSNativeContextSpecialization::kNoFlags;
    if (data->info()->is_accessor_inlining_enabled()) {
      flags |= JSNativeContextSpecialization::kAccessorInliningEnabled;
    }
    if (data->info()->is_bailout_on_uninitialized()) {
      flags |= JSNativeContextSpecialization::kBailoutOnUninitialized;
    }
    JSNativeContextSpecialization native_context_specialization(
        &graph_reducer, data->jsgraph(), data->broker(), flags,
        data->native_context(), data->dependencies(), temp_zone, info->zone());
    JSInliningHeuristic inlining(&graph_reducer,
                                 data->info()->is_inlining_enabled()
                                     ? JSInliningHeuristic::kGeneralInlining
                                     : JSInliningHeuristic::kRestrictedInlining,
                                 temp_zone, data->info(), data->jsgraph(),
                                 data->broker(), data->source_positions());
    AddReducer(data, &graph_reducer, &dead_code_elimination);
    AddReducer(data, &graph_reducer, &checkpoint_elimination);
    AddReducer(data, &graph_reducer, &common_reducer);
    AddReducer(data, &graph_reducer, &native_context_specialization);
    AddReducer(data, &graph_reducer, &context_specialization);
    AddReducer(data, &graph_reducer, &intrinsic_lowering);
    AddReducer(data, &graph_reducer, &call_reducer);
    AddReducer(data, &graph_reducer, &inlining);
    graph_reducer.ReduceGraph();
  static const char* phase_name() { return "typer"; }

    NodeVector roots(temp_zone);
    data->jsgraph()->GetCachedNodes(&roots);
    roots.push_back(data->jsgraph()->TrueConstant());
    roots.push_back(data->jsgraph()->FalseConstant());
    LoopVariableOptimizer induction_vars(data->jsgraph()->graph(),
                                         data->common(), temp_zone);
    if (FLAG_turbo_loop_variable) induction_vars.Run();
    typer->Run(roots, &induction_vars);
  static const char* phase_name() { return "untyper"; }

  class RemoveTypeReducer final : public Reducer {
   public:
    const char* reducer_name() const override { return "RemoveTypeReducer"; }
    Reduction Reduce(Node* node) final {
      if (NodeProperties::IsTyped(node)) {
        NodeProperties::RemoveType(node);
        return Changed(node);
      }
      return NoChange();
    }
  };

    NodeVector roots(temp_zone);
    data->jsgraph()->GetCachedNodes(&roots);
    for (Node* node : roots) {
      NodeProperties::RemoveType(node);
    }

    GraphReducer graph_reducer(temp_zone, data->graph(),
                               data->jsgraph()->Dead());
    RemoveTypeReducer remove_type_reducer;
    AddReducer(data, &graph_reducer, &remove_type_reducer);
    graph_reducer.ReduceGraph();
  static const char* phase_name() { return "serialize standard objects"; }

    data->broker()->SerializeStandardObjects();

  static const char* phase_name() { return "serialize metadata"; }

    GraphReducer graph_reducer(temp_zone, data->graph(),
                               data->jsgraph()->Dead());
    JSHeapCopyReducer heap_copy_reducer(data->broker());
    AddReducer(data, &graph_reducer, &heap_copy_reducer);
    graph_reducer.ReduceGraph();

    // Some nodes we need to visit here explicitly.
    NodeVector cached_nodes(temp_zone);
    data->jsgraph()->GetCachedNodes(&cached_nodes);
    for (Node* const node : cached_nodes) graph_reducer.ReduceNode(node);
  static const char* phase_name() { return "typed lowering"; }

    GraphReducer graph_reducer(temp_zone, data->graph(),
                               data->jsgraph()->Dead());
    DeadCodeElimination dead_code_elimination(&graph_reducer, data->graph(),
                                              data->common(), temp_zone);
    JSCreateLowering create_lowering(&graph_reducer, data->dependencies(),
                                     data->jsgraph(), data->broker(),
                                     temp_zone);
    JSTypedLowering typed_lowering(&graph_reducer, data->jsgraph(),
                                   data->broker(), temp_zone);
    ConstantFoldingReducer constant_folding_reducer(
        &graph_reducer, data->jsgraph(), data->broker());
    TypedOptimization typed_optimization(&graph_reducer, data->dependencies(),
                                         data->jsgraph(), data->broker());
    CommonOperatorReducer common_reducer(&graph_reducer, data->graph(),
                                         data->broker(), data->common(),
                                         data->machine(), temp_zone);
    AddReducer(data, &graph_reducer, &dead_code_elimination);
    AddReducer(data, &graph_reducer, &create_lowering);
    AddReducer(data, &graph_reducer, &constant_folding_reducer);
    AddReducer(data, &graph_reducer, &typed_lowering);
    AddReducer(data, &graph_reducer, &typed_optimization);
    AddReducer(data, &graph_reducer, &simple_reducer);
    AddReducer(data, &graph_reducer, &checkpoint_elimination);
    AddReducer(data, &graph_reducer, &common_reducer);
    graph_reducer.ReduceGraph();
  static const char* phase_name() { return "escape analysis"; }

    EscapeAnalysis escape_analysis(data->jsgraph(), temp_zone);
    escape_analysis.ReduceGraph();
    GraphReducer reducer(temp_zone, data->graph(), data->jsgraph()->Dead());
    EscapeAnalysisReducer escape_reducer(&reducer, data->jsgraph(),
                                         escape_analysis.analysis_result(),
                                         temp_zone);
    AddReducer(data, &reducer, &escape_reducer);
    reducer.ReduceGraph();
    escape_reducer.VerifyReplacement();
  static const char* phase_name() { return "simplified lowering"; }

    SimplifiedLowering lowering(data->jsgraph(), data->broker(), temp_zone,
                                data->source_positions(), data->node_origins(),
                                data->info()->GetPoisoningMitigationLevel());
    lowering.LowerAllNodes();
  static const char* phase_name() { return "loop peeling"; }

    GraphTrimmer trimmer(temp_zone, data->graph());
    NodeVector roots(temp_zone);
    data->jsgraph()->GetCachedNodes(&roots);
    trimmer.TrimGraph(roots.begin(), roots.end());

    LoopTree* loop_tree =
        LoopFinder::BuildLoopTree(data->jsgraph()->graph(), temp_zone);
    LoopPeeler(data->graph(), data->common(), loop_tree, temp_zone,
               data->source_positions(), data->node_origins())
        .PeelInnerLoopsOfTree();
  static const char* phase_name() { return "loop exit elimination"; }

    LoopPeeler::EliminateLoopExits(data->graph(), temp_zone);
  static const char* phase_name() { return "generic lowering"; }

    GraphReducer graph_reducer(temp_zone, data->graph(),
                               data->jsgraph()->Dead());
    JSGenericLowering generic_lowering(data->jsgraph());
    AddReducer(data, &graph_reducer, &generic_lowering);
    graph_reducer.ReduceGraph();
  static const char* phase_name() { return "early optimization"; }

    GraphReducer graph_reducer(temp_zone, data->graph(),
                               data->jsgraph()->Dead());
    DeadCodeElimination dead_code_elimination(&graph_reducer, data->graph(),
                                              data->common(), temp_zone);
    CommonOperatorReducer common_reducer(&graph_reducer, data->graph(),
                                         data->broker(), data->common(),
                                         data->machine(), temp_zone);
    AddReducer(data, &graph_reducer, &dead_code_elimination);
    AddReducer(data, &graph_reducer, &simple_reducer);
    AddReducer(data, &graph_reducer, &redundancy_elimination);
    AddReducer(data, &graph_reducer, &machine_reducer);
    AddReducer(data, &graph_reducer, &common_reducer);
    AddReducer(data, &graph_reducer, &value_numbering);
    graph_reducer.ReduceGraph();
  static const char* phase_name() { return "control flow optimization"; }

    ControlFlowOptimizer optimizer(data->graph(), data->common(),
                                   data->machine(), temp_zone);
    optimizer.Optimize();
  static const char* phase_name() { return "effect linearization"; }

    GraphTrimmer trimmer(temp_zone, data->graph());
    NodeVector roots(temp_zone);
    data->jsgraph()->GetCachedNodes(&roots);
    trimmer.TrimGraph(roots.begin(), roots.end());

    Schedule* schedule = Scheduler::ComputeSchedule(temp_zone, data->graph(),
                                                    Scheduler::kTempSchedule);
    if (FLAG_turbo_verify) ScheduleVerifier::Run(schedule);
    TraceSchedule(data->info(), data, schedule,
                  "effect linearization schedule");

    EffectControlLinearizer::MaskArrayIndexEnable mask_array_index =
        (data->info()->GetPoisoningMitigationLevel() !=
         PoisoningMitigationLevel::kDontPoison)
            ? EffectControlLinearizer::kMaskArrayIndex
            : EffectControlLinearizer::kDoNotMaskArrayIndex;
    EffectControlLinearizer linearizer(
        data->jsgraph(), schedule, temp_zone, data->source_positions(),
        data->node_origins(), mask_array_index);
    linearizer.Run();

    GraphReducer graph_reducer(temp_zone, data->graph(),
                               data->jsgraph()->Dead());
    DeadCodeElimination dead_code_elimination(&graph_reducer, data->graph(),
                                              data->common(), temp_zone);
    CommonOperatorReducer common_reducer(&graph_reducer, data->graph(),
                                         data->broker(), data->common(),
                                         data->machine(), temp_zone);
    AddReducer(data, &graph_reducer, &dead_code_elimination);
    AddReducer(data, &graph_reducer, &common_reducer);
    graph_reducer.ReduceGraph();
  static const char* phase_name() { return "store-store elimination"; }

    GraphTrimmer trimmer(temp_zone, data->graph());
    NodeVector roots(temp_zone);
    data->jsgraph()->GetCachedNodes(&roots);
    trimmer.TrimGraph(roots.begin(), roots.end());

    StoreStoreElimination::Run(data->jsgraph(), temp_zone);
  static const char* phase_name() { return "load elimination"; }

    GraphReducer graph_reducer(temp_zone, data->graph(),
                               data->jsgraph()->Dead());
    BranchElimination branch_condition_elimination(&graph_reducer,
                                                   data->jsgraph(), temp_zone);
    DeadCodeElimination dead_code_elimination(&graph_reducer, data->graph(),
                                              data->common(), temp_zone);
    CommonOperatorReducer common_reducer(&graph_reducer, data->graph(),
                                         data->broker(), data->common(),
                                         data->machine(), temp_zone);
    TypedOptimization typed_optimization(&graph_reducer, data->dependencies(),
                                         data->jsgraph(), data->broker());
    ConstantFoldingReducer constant_folding_reducer(
        &graph_reducer, data->jsgraph(), data->broker());
    AddReducer(data, &graph_reducer, &branch_condition_elimination);
    AddReducer(data, &graph_reducer, &dead_code_elimination);
    AddReducer(data, &graph_reducer, &redundancy_elimination);
    AddReducer(data, &graph_reducer, &load_elimination);
    AddReducer(data, &graph_reducer, &type_narrowing_reducer);
    AddReducer(data, &graph_reducer, &constant_folding_reducer);
    AddReducer(data, &graph_reducer, &typed_optimization);
    AddReducer(data, &graph_reducer, &checkpoint_elimination);
    AddReducer(data, &graph_reducer, &common_reducer);
    AddReducer(data, &graph_reducer, &value_numbering);
    graph_reducer.ReduceGraph();
  static const char* phase_name() { return "memory optimization"; }

    // The memory optimizer requires the graph to be trimmed, so trim now.
    GraphTrimmer trimmer(temp_zone, data->graph());
    NodeVector roots(temp_zone);
    data->jsgraph()->GetCachedNodes(&roots);
    trimmer.TrimGraph(roots.begin(), roots.end());

    MemoryOptimizer optimizer(
        data->jsgraph(), temp_zone, data->info()->GetPoisoningMitigationLevel(),
        data->info()->is_allocation_folding_enabled()
            ? MemoryOptimizer::AllocationFolding::kDoAllocationFolding
            : MemoryOptimizer::AllocationFolding::kDontAllocationFolding);
    optimizer.Optimize();
  static const char* phase_name() { return "late optimization"; }

    GraphReducer graph_reducer(temp_zone, data->graph(),
                               data->jsgraph()->Dead());
    BranchElimination branch_condition_elimination(&graph_reducer,
                                                   data->jsgraph(), temp_zone);
    DeadCodeElimination dead_code_elimination(&graph_reducer, data->graph(),
                                              data->common(), temp_zone);
    CommonOperatorReducer common_reducer(&graph_reducer, data->graph(),
                                         data->broker(), data->common(),
                                         data->machine(), temp_zone);
    SelectLowering select_lowering(data->jsgraph()->graph(),
                                   data->jsgraph()->common());
    AddReducer(data, &graph_reducer, &branch_condition_elimination);
    AddReducer(data, &graph_reducer, &dead_code_elimination);
    AddReducer(data, &graph_reducer, &machine_reducer);
    AddReducer(data, &graph_reducer, &common_reducer);
    AddReducer(data, &graph_reducer, &select_lowering);
    AddReducer(data, &graph_reducer, &value_numbering);
    graph_reducer.ReduceGraph();
  static const char* phase_name() { return "early trimming"; }

    GraphTrimmer trimmer(temp_zone, data->graph());
    NodeVector roots(temp_zone);
    data->jsgraph()->GetCachedNodes(&roots);
    trimmer.TrimGraph(roots.begin(), roots.end());

  static const char* phase_name() { return "late graph trimming"; }

    GraphTrimmer trimmer(temp_zone, data->graph());
    NodeVector roots(temp_zone);
    if (data->jsgraph()) {
      data->jsgraph()->GetCachedNodes(&roots);
    }
    trimmer.TrimGraph(roots.begin(), roots.end());
  static const char* phase_name() { return "scheduling"; }

    Schedule* schedule = Scheduler::ComputeSchedule(
        temp_zone, data->graph(), data->info()->is_splitting_enabled()
                                      ? Scheduler::kSplitNodes
                                      : Scheduler::kNoFlags);
    if (FLAG_turbo_verify) ScheduleVerifier::Run(schedule);
    data->set_schedule(schedule);
  const int max = static_cast<int>(s.sequence->LastInstructionIndex());

  out << ", \"nodeIdToInstructionRange\": {";
  bool need_comma = false;
  for (size_t i = 0; i < s.instr_origins->size(); ++i) {
    std::pair<int, int> offset = (*s.instr_origins)[i];
    if (offset.first == -1) continue;
    const int first = max - offset.first + 1;
    const int second = max - offset.second + 1;
    if (need_comma) out << ", ";
    out << "\"" << i << "\": [" << first << ", " << second << "]";
    need_comma = true;
  }
  out << "}";
  out << ", \"blockIdtoInstructionRange\": {";
  need_comma = false;
  for (auto block : s.sequence->instruction_blocks()) {
    if (need_comma) out << ", ";
    out << "\"" << block->rpo_number() << "\": [" << block->code_start() << ", "
        << block->code_end() << "]";
    need_comma = true;
  }
  out << "}";
  return out;
  static const char* phase_name() { return "select instructions"; }

    InstructionSelector selector(
        temp_zone, data->graph()->NodeCount(), linkage, data->sequence(),
        data->schedule(), data->source_positions(), data->frame(),
        data->info()->switch_jump_table_enabled()
            ? InstructionSelector::kEnableSwitchJumpTable
            : InstructionSelector::kDisableSwitchJumpTable,
        data->info()->is_source_positions_enabled()
            ? InstructionSelector::kAllSourcePositions
            : InstructionSelector::kCallSourcePositions,
        InstructionSelector::SupportedFeatures(),
        FLAG_turbo_instruction_scheduling
            ? InstructionSelector::kEnableScheduling
            : InstructionSelector::kDisableScheduling,
        !data->isolate() || data->isolate()->serializer_enabled() ||
                data->isolate()->ShouldLoadConstantsFromRootList()
            ? InstructionSelector::kDisableRootsRelativeAddressing
            : InstructionSelector::kEnableRootsRelativeAddressing,
        data->info()->GetPoisoningMitigationLevel(),
        data->info()->trace_turbo_json_enabled()
            ? InstructionSelector::kEnableTraceTurboJson
            : InstructionSelector::kDisableTraceTurboJson);
    if (!selector.SelectInstructions()) {
      data->set_compilation_failed();
    }
    if (data->info()->trace_turbo_json_enabled()) {
      TurboJsonFile json_of(data->info(), std::ios_base::app);
      json_of << "{\"name\":\"" << phase_name()
              << "\",\"type\":\"instructions\""
              << InstructionRangesAsJSON{data->sequence(),
                                         &selector.instr_origins()}
              << "},\n";
    }
  static const char* phase_name() { return "meet register constraints"; }
    builder.MeetRegisterConstraints();

  static const char* phase_name() { return "resolve phis"; }
    builder.ResolvePhis();

  static const char* phase_name() { return "build live ranges"; }
    builder.BuildLiveRanges();

  static const char* phase_name() { return "splinter live ranges"; }
    live_range_splinterer.Splinter();

template <typename RegAllocator>
  static const char* phase_name() { return "allocate general registers"; }
    RegAllocator allocator(data->register_allocation_data(), GENERAL_REGISTERS,
                           temp_zone);
    allocator.AllocateRegisters();

template <typename RegAllocator>
  static const char* phase_name() { return "allocate f.p. registers"; }
    RegAllocator allocator(data->register_allocation_data(), FP_REGISTERS,
                           temp_zone);
    allocator.AllocateRegisters();

  static const char* phase_name() { return "merge splintered ranges"; }
    live_range_merger.Merge();

  static const char* phase_name() { return "locate spill slots"; }
    locator.LocateSpillSlots();

  static const char* phase_name() { return "assign spill slots"; }
    assigner.AssignSpillSlots();

  static const char* phase_name() { return "commit assignment"; }
    assigner.CommitAssignment();

  static const char* phase_name() { return "populate pointer maps"; }
    populator.PopulateReferenceMaps();

  static const char* phase_name() { return "connect ranges"; }
    connector.ConnectRanges(temp_zone);

  static const char* phase_name() { return "resolve control flow"; }
    connector.ResolveControlFlow(temp_zone);

  static const char* phase_name() { return "optimize moves"; }
    move_optimizer.Run();

  static const char* phase_name() { return "frame elision"; }

  static const char* phase_name() { return "jump threading"; }
    if (JumpThreading::ComputeForwarding(temp_zone, result, data->sequence(),
                                         frame_at_start)) {
      JumpThreading::ApplyForwarding(temp_zone, result, data->sequence());
    }

  static const char* phase_name() { return "assemble code"; }
    data->code_generator()->AssembleCode();

  static const char* phase_name() { return "finalize code"; }
    data->set_code(data->code_generator()->FinalizeCode());
  static const char* phase_name() { return nullptr; }

    OptimizedCompilationInfo* info = data->info();
    Graph* graph = data->graph();

    if (info->trace_turbo_json_enabled()) {
      AllowHandleDereference allow_deref;
      TurboJsonFile json_of(info, std::ios_base::app);
      json_of << "{\"name\":\"" << phase << "\",\"type\":\"graph\",\"data\":"
              << AsJSON(*graph, data->source_positions(), data->node_origins())
              << "},\n";
    }

    if (info->trace_turbo_scheduled_enabled()) {
      Schedule* schedule = data->schedule();
      if (schedule == nullptr) {
        schedule = Scheduler::ComputeSchedule(temp_zone, data->graph(),
                                              Scheduler::kNoFlags);
      }

      AllowHandleDereference allow_deref;
      CodeTracer::Scope tracing_scope(data->GetCodeTracer());
      OFStream os(tracing_scope.file());
      os << "-- Graph after " << phase << " -- " << std::endl;
    } else if (info->trace_turbo_graph_enabled()) {
      AllowHandleDereference allow_deref;
      CodeTracer::Scope tracing_scope(data->GetCodeTracer());
      OFStream os(tracing_scope.file());
      os << "-- Graph after " << phase << " -- " << std::endl;
      os << AsRPO(*graph);
    }
  static const char* phase_name() { return nullptr; }

  void Run(PipelineData* data, Zone* temp_zone, const bool untyped,
           bool values_only = false) {
    Verifier::CodeType code_type;
    switch (data->info()->code_kind()) {
      case Code::WASM_FUNCTION:
      case Code::WASM_TO_JS_FUNCTION:
      case Code::JS_TO_WASM_FUNCTION:
      case Code::WASM_INTERPRETER_ENTRY:
      case Code::C_WASM_ENTRY:
        code_type = Verifier::kWasm;
        break;
      default:
        code_type = Verifier::kDefault;
    }
    Verifier::Run(data->graph(), !untyped ? Verifier::TYPED : Verifier::UNTYPED,
                  values_only ? Verifier::kValuesOnly : Verifier::kAll,
                  code_type);
  }
void PipelineImpl::RunPrintAndVerify(const char* phase, bool untyped) {
  if (info()->trace_turbo_json_enabled() ||
      info()->trace_turbo_graph_enabled()) {
    Run<PrintGraphPhase>(phase);
  }
  if (FLAG_turbo_verify) {
    Run<VerifyGraphPhase>(untyped);
  }
}
bool PipelineImpl::CreateGraph() {
  PipelineData* data = this->data_;

  data->BeginPhaseKind("graph creation");

  if (info()->trace_turbo_json_enabled() ||
      info()->trace_turbo_graph_enabled()) {
    CodeTracer::Scope tracing_scope(data->GetCodeTracer());
    OFStream os(tracing_scope.file());
    os << "---------------------------------------------------\n"
       << "Begin compiling method " << info()->GetDebugName().get()
       << " using Turbofan" << std::endl;
  }
  if (info()->trace_turbo_json_enabled()) {
    TurboCfgFile tcf(isolate());
    tcf << AsC1VCompilation(info());
  }

  data->source_positions()->AddDecorator();
  if (data->info()->trace_turbo_json_enabled()) {
    data->node_origins()->AddDecorator();
  }

  Run<GraphBuilderPhase>();
  RunPrintAndVerify(GraphBuilderPhase::phase_name(), true);

  if (FLAG_concurrent_inlining) {
    data->broker()->StartSerializing();
    Run<SerializeStandardObjectsPhase>();
    Run<CopyMetadataForConcurrentCompilePhase>();
  } else {
    data->broker()->SetNativeContextRef();
  }

  Run<InliningPhase>();
  RunPrintAndVerify(InliningPhase::phase_name(), true);

  // Remove dead->live edges from the graph.
  Run<EarlyGraphTrimmingPhase>();
  RunPrintAndVerify(EarlyGraphTrimmingPhase::phase_name(), true);

  // Determine the Typer operation flags.
  if (is_sloppy(info()->shared_info()->language_mode()) &&
      info()->shared_info()->IsUserJavaScript()) {
    data->AddTyperFlag(Typer::kThisIsReceiver);
  }
  if (IsClassConstructor(info()->shared_info()->kind())) {
    data->AddTyperFlag(Typer::kNewTargetIsReceiver);
  }

  if (FLAG_concurrent_inlining) {
    Run<CopyMetadataForConcurrentCompilePhase>();
    data->broker()->StopSerializing();
  } else {
    data->broker()->StartSerializing();
    Run<SerializeStandardObjectsPhase>();
    Run<CopyMetadataForConcurrentCompilePhase>();
    data->broker()->StopSerializing();
  }

  data->EndPhaseKind();

  return true;
}
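// OptimizeGraph below runs the main lowering sequence: typing, typed lowering,
// optional loop peeling, load elimination and escape analysis, simplified
// lowering, untyping, generic lowering, the early and late optimization
// passes, effect-control linearization, store-store and control-flow
// optimization, memory optimization, and finally scheduling and instruction
// selection.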
bool PipelineImpl::OptimizeGraph(Linkage* linkage) {
  PipelineData* data = this->data_;

  data->BeginPhaseKind("lowering");

  Run<TyperPhase>(data->CreateTyper());
  RunPrintAndVerify(TyperPhase::phase_name());
  Run<TypedLoweringPhase>();
  RunPrintAndVerify(TypedLoweringPhase::phase_name());

  if (data->info()->is_loop_peeling_enabled()) {
    Run<LoopPeelingPhase>();
    RunPrintAndVerify(LoopPeelingPhase::phase_name(), true);
  } else {
    Run<LoopExitEliminationPhase>();
    RunPrintAndVerify(LoopExitEliminationPhase::phase_name(), true);
  }

  if (FLAG_turbo_load_elimination) {
    Run<LoadEliminationPhase>();
    RunPrintAndVerify(LoadEliminationPhase::phase_name());
  }
  data->DeleteTyper();

  if (FLAG_turbo_escape) {
    Run<EscapeAnalysisPhase>();
    if (data->compilation_failed()) {
      info()->AbortOptimization(
          BailoutReason::kCyclicObjectStateDetectedInEscapeAnalysis);
      data->EndPhaseKind();
      return false;
    }
    RunPrintAndVerify(EscapeAnalysisPhase::phase_name());
  }

  Run<SimplifiedLoweringPhase>();
  RunPrintAndVerify(SimplifiedLoweringPhase::phase_name(), true);

  Run<UntyperPhase>();
  RunPrintAndVerify(UntyperPhase::phase_name(), true);

  Run<GenericLoweringPhase>();
  RunPrintAndVerify(GenericLoweringPhase::phase_name(), true);

  data->BeginPhaseKind("block building");

  Run<EarlyOptimizationPhase>();
  RunPrintAndVerify(EarlyOptimizationPhase::phase_name(), true);

  Run<EffectControlLinearizationPhase>();
  RunPrintAndVerify(EffectControlLinearizationPhase::phase_name(), true);

  if (FLAG_turbo_store_elimination) {
    Run<StoreStoreEliminationPhase>();
    RunPrintAndVerify(StoreStoreEliminationPhase::phase_name(), true);
  }

  if (FLAG_turbo_cf_optimization) {
    Run<ControlFlowOptimizationPhase>();
    RunPrintAndVerify(ControlFlowOptimizationPhase::phase_name(), true);
  }

  Run<MemoryOptimizationPhase>();
  RunPrintAndVerify(MemoryOptimizationPhase::phase_name(), true);

  Run<LateOptimizationPhase>();
  RunPrintAndVerify(LateOptimizationPhase::phase_name(), true);

  data->source_positions()->RemoveDecorator();
  if (data->info()->trace_turbo_json_enabled()) {
    data->node_origins()->RemoveDecorator();
  }

  ComputeScheduledGraph();

  return SelectInstructions(linkage);
}
MaybeHandle<Code> Pipeline::GenerateCodeForCodeStub(
    Isolate* isolate, CallDescriptor* call_descriptor, Graph* graph,
    Schedule* schedule, Code::Kind kind, const char* debug_name,
    uint32_t stub_key, int32_t builtin_index, JumpOptimizationInfo* jump_opt,
    PoisoningMitigationLevel poisoning_level, const AssemblerOptions& options) {
  OptimizedCompilationInfo info(CStrVector(debug_name), graph->zone(), kind);
  info.set_builtin_index(builtin_index);
  info.set_stub_key(stub_key);

  if (poisoning_level != PoisoningMitigationLevel::kDontPoison) {
    info.SetPoisoningMitigationLevel(poisoning_level);
  }

  // Construct a pipeline for scheduling and code generation.
  ZoneStats zone_stats(isolate->allocator());
  NodeOriginTable node_origins(graph);
  PipelineData data(&zone_stats, &info, isolate, graph, schedule, nullptr,
                    &node_origins, jump_opt, options);
  data.set_verify_graph(FLAG_verify_csa);
  std::unique_ptr<PipelineStatistics> pipeline_statistics;
  if (FLAG_turbo_stats || FLAG_turbo_stats_nvp) {
    pipeline_statistics.reset(new PipelineStatistics(
        &info, isolate->GetTurboStatistics(), &zone_stats));
    pipeline_statistics->BeginPhaseKind("stub codegen");
  }

  PipelineImpl pipeline(&data);

  if (info.trace_turbo_json_enabled() || info.trace_turbo_graph_enabled()) {
    CodeTracer::Scope tracing_scope(data.GetCodeTracer());
    OFStream os(tracing_scope.file());
    os << "---------------------------------------------------\n"
       << "Begin compiling " << debug_name << " using Turbofan" << std::endl;
    if (info.trace_turbo_json_enabled()) {
      TurboJsonFile json_of(&info, std::ios_base::trunc);
      json_of << "{\"function\" : ";
      JsonPrintFunctionSource(json_of, -1, info.GetDebugName(),
                              Handle<Script>(), isolate,
                              Handle<SharedFunctionInfo>());
      json_of << ",\n\"phases\":[";
    }
    pipeline.Run<PrintGraphPhase>("Machine");
  }

  if (FLAG_optimize_csa) {
    DCHECK_NULL(data.schedule());
    pipeline.Run<VerifyGraphPhase>(true, !FLAG_optimize_csa);
    pipeline.ComputeScheduledGraph();
  }
  TraceSchedule(data.info(), &data, data.schedule(), "schedule");

  DCHECK_NOT_NULL(data.schedule());

  return pipeline.GenerateCode(call_descriptor);
}
wasm::WasmCode* Pipeline::GenerateCodeForWasmNativeStub(
    wasm::WasmEngine* wasm_engine, CallDescriptor* call_descriptor,
    MachineGraph* mcgraph, Code::Kind kind, int wasm_kind,
    const char* debug_name, const AssemblerOptions& options,
    wasm::NativeModule* native_module, SourcePositionTable* source_positions) {
  Graph* graph = mcgraph->graph();
  OptimizedCompilationInfo info(CStrVector(debug_name), graph->zone(), kind);
  // Construct a pipeline for scheduling and code generation.
  ZoneStats zone_stats(wasm_engine->allocator());
  NodeOriginTable* node_positions = new (graph->zone()) NodeOriginTable(graph);
  PipelineData data(&zone_stats, wasm_engine, &info, mcgraph, nullptr,
                    source_positions, node_positions, options);
  std::unique_ptr<PipelineStatistics> pipeline_statistics;
  if (FLAG_turbo_stats || FLAG_turbo_stats_nvp) {
    pipeline_statistics.reset(new PipelineStatistics(
        &info, wasm_engine->GetOrCreateTurboStatistics(), &zone_stats));
    pipeline_statistics->BeginPhaseKind("wasm stub codegen");
  }

  PipelineImpl pipeline(&data);

  if (info.trace_turbo_json_enabled() || info.trace_turbo_graph_enabled()) {
    CodeTracer::Scope tracing_scope(data.GetCodeTracer());
    OFStream os(tracing_scope.file());
    os << "---------------------------------------------------\n"
       << "Begin compiling method " << info.GetDebugName().get()
       << " using Turbofan" << std::endl;
  }

  if (info.trace_turbo_graph_enabled()) {
    StdoutStream{} << "-- wasm stub " << Code::Kind2String(kind) << " graph -- "
                   << std::endl
                   << AsRPO(*graph);
  }

  if (info.trace_turbo_json_enabled()) {
    TurboJsonFile json_of(&info, std::ios_base::trunc);
    json_of << "{\"function\":\"" << info.GetDebugName().get()
            << "\", \"source\":\"\",\n\"phases\":[";
  }

  pipeline.RunPrintAndVerify("machine", true);
  pipeline.ComputeScheduledGraph();

  Linkage linkage(call_descriptor);
  if (!pipeline.SelectInstructions(&linkage)) return nullptr;
  pipeline.AssembleCode(&linkage);

  CodeGenerator* code_generator = pipeline.code_generator();
  CodeDesc code_desc;
  code_generator->tasm()->GetCode(nullptr, &code_desc);

  wasm::WasmCode* code = native_module->AddCode(
      wasm::WasmCode::kAnonymousFuncIndex, code_desc,
      code_generator->frame()->GetTotalFrameSlotCount(),
      code_generator->GetSafepointTableOffset(),
      code_generator->GetHandlerTableOffset(),
      code_generator->GetProtectedInstructions(),
      code_generator->GetSourcePositionTable(),
      static_cast<wasm::WasmCode::Kind>(wasm_kind), wasm::WasmCode::kOther);

  if (info.trace_turbo_json_enabled()) {
    TurboJsonFile json_of(&info, std::ios_base::app);
    json_of << "{\"name\":\"disassembly\",\"type\":\"disassembly\",\"data\":\"";
#ifdef ENABLE_DISASSEMBLER
    std::stringstream disassembler_stream;
    Disassembler::Decode(
        nullptr, &disassembler_stream, code->instructions().start(),
        code->instructions().start() + code->safepoint_table_offset(),
        CodeReference(code));
    for (auto const c : disassembler_stream.str()) {
      json_of << AsEscapedUC16ForJSON(c);
    }
#endif  // ENABLE_DISASSEMBLER
    json_of << "\"}\n]";
  }

  if (info.trace_turbo_json_enabled() || info.trace_turbo_graph_enabled()) {
    CodeTracer::Scope tracing_scope(data.GetCodeTracer());
    OFStream os(tracing_scope.file());
    os << "---------------------------------------------------\n"
       << "Finished compiling method " << info.GetDebugName().get()
       << " using Turbofan" << std::endl;
  }

  return code;
}
MaybeHandle<Code> Pipeline::GenerateCodeForWasmHeapStub(
    Isolate* isolate, CallDescriptor* call_descriptor, Graph* graph,
    Code::Kind kind, const char* debug_name, const AssemblerOptions& options,
    SourcePositionTable* source_positions) {
  OptimizedCompilationInfo info(CStrVector(debug_name), graph->zone(), kind);
  // Construct a pipeline for scheduling and code generation.
  ZoneStats zone_stats(isolate->allocator());
  NodeOriginTable* node_positions = new (graph->zone()) NodeOriginTable(graph);
  PipelineData data(&zone_stats, &info, isolate, graph, nullptr,
                    source_positions, node_positions, nullptr, options);
  std::unique_ptr<PipelineStatistics> pipeline_statistics;
  if (FLAG_turbo_stats || FLAG_turbo_stats_nvp) {
    pipeline_statistics.reset(new PipelineStatistics(
        &info, isolate->GetTurboStatistics(), &zone_stats));
    pipeline_statistics->BeginPhaseKind("wasm stub codegen");
  }

  PipelineImpl pipeline(&data);

  if (info.trace_turbo_json_enabled() ||
      info.trace_turbo_graph_enabled()) {
    CodeTracer::Scope tracing_scope(data.GetCodeTracer());
    OFStream os(tracing_scope.file());
    os << "---------------------------------------------------\n"
       << "Begin compiling method " << info.GetDebugName().get()
       << " using Turbofan" << std::endl;
  }

  if (info.trace_turbo_graph_enabled()) {
    StdoutStream{} << "-- wasm stub " << Code::Kind2String(kind) << " graph -- "
                   << std::endl
                   << AsRPO(*graph);
  }

  if (info.trace_turbo_json_enabled()) {
    TurboJsonFile json_of(&info, std::ios_base::trunc);
    json_of << "{\"function\":\"" << info.GetDebugName().get()
            << "\", \"source\":\"\",\n\"phases\":[";
  }

  pipeline.RunPrintAndVerify("machine", true);
  pipeline.ComputeScheduledGraph();

  Handle<Code> code;
  if (pipeline.GenerateCode(call_descriptor).ToHandle(&code) &&
      pipeline.CommitDependencies(code)) {
    return code;
  }
  return MaybeHandle<Code>();
}
MaybeHandle<Code> Pipeline::GenerateCodeForTesting(
    OptimizedCompilationInfo* info, Isolate* isolate) {
  ZoneStats zone_stats(isolate->allocator());
  std::unique_ptr<PipelineStatistics> pipeline_statistics(
      CreatePipelineStatistics(Handle<Script>::null(), info, isolate,
                               &zone_stats));
  PipelineData data(&zone_stats, isolate, info, pipeline_statistics.get());
  PipelineImpl pipeline(&data);

  Linkage linkage(Linkage::ComputeIncoming(data.instruction_zone(), info));
  Deoptimizer::EnsureCodeForMaxDeoptimizationEntries(isolate);

  if (!pipeline.CreateGraph()) return MaybeHandle<Code>();
  if (!pipeline.OptimizeGraph(&linkage)) return MaybeHandle<Code>();
  pipeline.AssembleCode(&linkage);
  Handle<Code> code;
  if (pipeline.FinalizeCode().ToHandle(&code) &&
      pipeline.CommitDependencies(code)) {
    return code;
  }
  return MaybeHandle<Code>();
}
MaybeHandle<Code> Pipeline::GenerateCodeForTesting(
    OptimizedCompilationInfo* info, Isolate* isolate,
    CallDescriptor* call_descriptor, Graph* graph,
    const AssemblerOptions& options, Schedule* schedule) {
  // Construct a pipeline for scheduling and code generation.
  ZoneStats zone_stats(isolate->allocator());
  NodeOriginTable* node_positions = new (info->zone()) NodeOriginTable(graph);
  PipelineData data(&zone_stats, info, isolate, graph, schedule, nullptr,
                    node_positions, nullptr, options);
  std::unique_ptr<PipelineStatistics> pipeline_statistics;
  if (FLAG_turbo_stats || FLAG_turbo_stats_nvp) {
    pipeline_statistics.reset(new PipelineStatistics(
        info, isolate->GetTurboStatistics(), &zone_stats));
    pipeline_statistics->BeginPhaseKind("test codegen");
  }

  PipelineImpl pipeline(&data);

  if (info->trace_turbo_json_enabled()) {
    TurboJsonFile json_of(info, std::ios_base::trunc);
    json_of << "{\"function\":\"" << info->GetDebugName().get()
            << "\", \"source\":\"\",\n\"phases\":[";
  }
  pipeline.RunPrintAndVerify("machine", true);

  // Ensure we have a schedule.
  if (data.schedule() == nullptr) {
    pipeline.ComputeScheduledGraph();
  }

  Handle<Code> code;
  if (pipeline.GenerateCode(call_descriptor).ToHandle(&code) &&
      pipeline.CommitDependencies(code)) {
    return code;
  }
  return MaybeHandle<Code>();
}
OptimizedCompilationJob* Pipeline::NewCompilationJob(
    Isolate* isolate, Handle<JSFunction> function, bool has_script) {
  Handle<SharedFunctionInfo> shared =
      handle(function->shared(), function->GetIsolate());
  return new PipelineCompilationJob(isolate, shared, function);
}
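
// Compiles a single wasm function: runs the machine-level optimization passes
// over the given MachineGraph, schedules it, selects instructions, generates
// code, and adds the result to the owning NativeModule.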
wasm::WasmCode* Pipeline::GenerateCodeForWasmFunction(
    OptimizedCompilationInfo* info, wasm::WasmEngine* wasm_engine,
    MachineGraph* mcgraph, CallDescriptor* call_descriptor,
    SourcePositionTable* source_positions, NodeOriginTable* node_origins,
    wasm::FunctionBody function_body, wasm::NativeModule* native_module,
    int function_index) {
  ZoneStats zone_stats(wasm_engine->allocator());
  std::unique_ptr<PipelineStatistics> pipeline_statistics(
      CreatePipelineStatistics(wasm_engine, function_body,
                               native_module->module(), info, &zone_stats));
  PipelineData data(&zone_stats, wasm_engine, info, mcgraph,
                    pipeline_statistics.get(), source_positions, node_origins,
                    WasmAssemblerOptions());

  PipelineImpl pipeline(&data);

  if (data.info()->trace_turbo_json_enabled() ||
      data.info()->trace_turbo_graph_enabled()) {
    CodeTracer::Scope tracing_scope(data.GetCodeTracer());
    OFStream os(tracing_scope.file());
    os << "---------------------------------------------------\n"
       << "Begin compiling method " << data.info()->GetDebugName().get()
       << " using Turbofan" << std::endl;
  }

  pipeline.RunPrintAndVerify("Machine", true);

  data.BeginPhaseKind("wasm optimization");
  const bool is_asm_js = native_module->module()->origin == wasm::kAsmJsOrigin;
  if (FLAG_turbo_splitting && !is_asm_js) {
    data.info()->MarkAsSplittingEnabled();
  }
  if (FLAG_wasm_opt || is_asm_js) {
    PipelineRunScope scope(&data, "wasm full optimization");
    GraphReducer graph_reducer(scope.zone(), data.graph(),
                               data.mcgraph()->Dead());
    DeadCodeElimination dead_code_elimination(&graph_reducer, data.graph(),
                                              data.common(), scope.zone());
    ValueNumberingReducer value_numbering(scope.zone(), data.graph()->zone());
    const bool allow_signalling_nan = is_asm_js;
    MachineOperatorReducer machine_reducer(data.mcgraph(),
                                           allow_signalling_nan);
    CommonOperatorReducer common_reducer(&graph_reducer, data.graph(),
                                         data.broker(), data.common(),
                                         data.machine(), scope.zone());
    AddReducer(&data, &graph_reducer, &dead_code_elimination);
    AddReducer(&data, &graph_reducer, &machine_reducer);
    AddReducer(&data, &graph_reducer, &common_reducer);
    AddReducer(&data, &graph_reducer, &value_numbering);
    graph_reducer.ReduceGraph();
  } else {
    PipelineRunScope scope(&data, "wasm base optimization");
    GraphReducer graph_reducer(scope.zone(), data.graph(),
                               data.mcgraph()->Dead());
    ValueNumberingReducer value_numbering(scope.zone(), data.graph()->zone());
    AddReducer(&data, &graph_reducer, &value_numbering);
    graph_reducer.ReduceGraph();
  }
  pipeline.RunPrintAndVerify("wasm optimization", true);

  if (data.node_origins()) {
    data.node_origins()->RemoveDecorator();
  }

  pipeline.ComputeScheduledGraph();

  Linkage linkage(call_descriptor);
  if (!pipeline.SelectInstructions(&linkage)) return nullptr;
  pipeline.AssembleCode(&linkage);

  CodeGenerator* code_generator = pipeline.code_generator();
  CodeDesc code_desc;
  code_generator->tasm()->GetCode(nullptr, &code_desc);

  wasm::WasmCode* code = native_module->AddCode(
      function_index, code_desc,
      code_generator->frame()->GetTotalFrameSlotCount(),
      code_generator->GetSafepointTableOffset(),
      code_generator->GetHandlerTableOffset(),
      code_generator->GetProtectedInstructions(),
      code_generator->GetSourcePositionTable(), wasm::WasmCode::kFunction,
      wasm::WasmCode::kTurbofan);

  if (data.info()->trace_turbo_json_enabled()) {
    TurboJsonFile json_of(data.info(), std::ios_base::app);
    json_of << "{\"name\":\"disassembly\",\"type\":\"disassembly\",\"data\":\"";
#ifdef ENABLE_DISASSEMBLER
    std::stringstream disassembler_stream;
    Disassembler::Decode(
        nullptr, &disassembler_stream, code->instructions().start(),
        code->instructions().start() + code->safepoint_table_offset(),
        CodeReference(code));
    for (auto const c : disassembler_stream.str()) {
      json_of << AsEscapedUC16ForJSON(c);
    }
#endif  // ENABLE_DISASSEMBLER
    json_of << "\"}\n]";
    json_of << "\n}";
  }

  if (data.info()->trace_turbo_json_enabled() ||
      data.info()->trace_turbo_graph_enabled()) {
    CodeTracer::Scope tracing_scope(data.GetCodeTracer());
    OFStream os(tracing_scope.file());
    os << "---------------------------------------------------\n"
       << "Finished compiling method " << data.info()->GetDebugName().get()
       << " using Turbofan" << std::endl;
  }

  return code;
}
// Test-only helper that exercises just the register-allocation part of the
// pipeline on an existing InstructionSequence.
bool Pipeline::AllocateRegistersForTesting(const RegisterConfiguration* config,
                                           InstructionSequence* sequence,
                                           bool run_verifier) {
  OptimizedCompilationInfo info(ArrayVector("testing"), sequence->zone(),
                                Code::STUB);
  ZoneStats zone_stats(sequence->isolate()->allocator());
  PipelineData data(&zone_stats, &info, sequence->isolate(), sequence);
  data.InitializeFrameData(nullptr);
  PipelineImpl pipeline(&data);
  pipeline.AllocateRegisters(config, nullptr, run_verifier);
  return !data.compilation_failed();
}
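
// Trims the graph and computes a schedule for it; instruction selection
// requires a scheduled graph.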
void PipelineImpl::ComputeScheduledGraph() {
  PipelineData* data = this->data_;

  // The graph must not be scheduled yet.
  DCHECK_NULL(data->schedule());

  Run<LateGraphTrimmingPhase>();
  RunPrintAndVerify(LateGraphTrimmingPhase::phase_name(), true);

  Run<ComputeSchedulePhase>();
  TraceSchedule(data->info(), data, data->schedule(), "schedule");
}
// Instruction selection and register allocation: optionally verifies the
// machine graph, builds the InstructionSequence, allocates registers, and
// runs frame elision and jump threading.
bool PipelineImpl::SelectInstructions(Linkage* linkage) {
  auto call_descriptor = linkage->GetIncomingDescriptor();
  PipelineData* data = this->data_;

  // We should have a scheduled graph.
  DCHECK_NOT_NULL(data->graph());
  DCHECK_NOT_NULL(data->schedule());

  if (FLAG_turbo_profiling) {
    data->set_profiler_data(BasicBlockInstrumentor::Instrument(
        info(), data->graph(), data->schedule(), data->isolate()));
  }

  bool verify_stub_graph = data->verify_graph();
  // Skip graph verification when jump optimization is re-running the backend.
  auto jump_opt = data->jump_optimization_info();
  if (jump_opt && jump_opt->is_optimizing()) {
    verify_stub_graph = false;
  }
  if (verify_stub_graph ||
      (FLAG_turbo_verify_machine_graph != nullptr &&
       (!strcmp(FLAG_turbo_verify_machine_graph, "*") ||
        !strcmp(FLAG_turbo_verify_machine_graph, data->debug_name())))) {
    if (FLAG_trace_verify_csa) {
      AllowHandleDereference allow_deref;
      CodeTracer::Scope tracing_scope(data->GetCodeTracer());
      OFStream os(tracing_scope.file());
      os << "--------------------------------------------------\n"
         << "--- Verifying " << data->debug_name()
         << " generated by TurboFan\n"
         << "--------------------------------------------------\n"
         << *data->schedule()
         << "--------------------------------------------------\n"
         << "--- End of " << data->debug_name() << " generated by TurboFan\n"
         << "--------------------------------------------------\n";
    }
    Zone temp_zone(data->allocator(), ZONE_NAME);
    MachineGraphVerifier::Run(data->graph(), data->schedule(), linkage,
                              data->info()->IsStub(), data->debug_name(),
                              &temp_zone);
  }

  data->InitializeInstructionSequence(call_descriptor);

  data->InitializeFrameData(call_descriptor);
  // Select and schedule instructions covering the scheduled graph.
  Run<InstructionSelectionPhase>(linkage);
  if (data->compilation_failed()) {
    info()->AbortOptimization(BailoutReason::kCodeGenerationFailed);
    data->EndPhaseKind();
    return false;
  }

  if (info()->trace_turbo_json_enabled() && !data->MayHaveUnverifiableGraph()) {
    AllowHandleDereference allow_deref;
    TurboCfgFile tcf(isolate());
    tcf << AsC1V("CodeGen", data->schedule(), data->source_positions(),
                 data->sequence());
  }

  if (info()->trace_turbo_json_enabled()) {
    std::ostringstream source_position_output;
    // Output source position information before the graph zone is deleted.
    if (data_->source_positions() != nullptr) {
      data_->source_positions()->PrintJson(source_position_output);
    } else {
      source_position_output << "{}";
    }
    source_position_output << ",\n\"NodeOrigins\" : ";
    data_->node_origins()->PrintJson(source_position_output);
    data_->set_source_position_output(source_position_output.str());
  }

  data->DeleteGraphZone();

  data->BeginPhaseKind("register allocation");

  bool run_verifier = FLAG_turbo_verify_allocation;

  // Allocate registers.
  if (call_descriptor->HasRestrictedAllocatableRegisters()) {
    RegList registers = call_descriptor->AllocatableRegisters();
    DCHECK_LT(0, NumRegs(registers));
    std::unique_ptr<const RegisterConfiguration> config;
    config.reset(RegisterConfiguration::RestrictGeneralRegisters(registers));
    AllocateRegisters(config.get(), call_descriptor, run_verifier);
  } else if (data->info()->GetPoisoningMitigationLevel() !=
             PoisoningMitigationLevel::kDontPoison) {
#ifdef V8_TARGET_ARCH_IA32
    FATAL("Poisoning is not supported on ia32.");
#else
    AllocateRegisters(RegisterConfiguration::Poisoning(), call_descriptor,
                      run_verifier);
#endif  // V8_TARGET_ARCH_IA32
  } else {
    AllocateRegisters(RegisterConfiguration::Default(), call_descriptor,
                      run_verifier);
  }

  // Verify the instruction sequence hashes identically across both jump
  // optimization passes.
  VerifyGeneratedCodeIsIdempotent();

  Run<FrameElisionPhase>();
  if (data->compilation_failed()) {
    info()->AbortOptimization(
        BailoutReason::kNotEnoughVirtualRegistersRegalloc);
    data->EndPhaseKind();
    return false;
  }

  bool generate_frame_at_start =
      data_->sequence()->instruction_blocks().front()->must_construct_frame();
  // Optimize jumps.
  if (FLAG_turbo_jt) {
    Run<JumpThreadingPhase>(generate_frame_at_start);
  }

  data->EndPhaseKind();

  return true;
}
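
// When jump optimization collects data in a first pass and applies it in a
// second, both passes must see an equivalent instruction sequence; this
// hashes the sequence and checks the hash against the collected one.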
void PipelineImpl::VerifyGeneratedCodeIsIdempotent() {
  PipelineData* data = this->data_;
  JumpOptimizationInfo* jump_opt = data->jump_optimization_info();
  if (jump_opt == nullptr) return;

  InstructionSequence* code = data->sequence();
  int instruction_blocks = code->InstructionBlockCount();
  int virtual_registers = code->VirtualRegisterCount();
  size_t hash_code = base::hash_combine(instruction_blocks, virtual_registers);
  for (auto instr : *code) {
    hash_code = base::hash_combine(hash_code, instr->opcode(),
                                   instr->InputCount(), instr->OutputCount());
  }
  for (int i = 0; i < virtual_registers; i++) {
    hash_code = base::hash_combine(hash_code, code->GetRepresentation(i));
  }
  if (jump_opt->is_collecting()) {
    jump_opt->set_hash_code(hash_code);
  } else {
    CHECK_EQ(hash_code, jump_opt->hash_code());
  }
}
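
// Stream helper for the JSON trace output: prints the mapping from
// instruction index to code offset recorded by the code generator.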
struct InstructionStartsAsJSON {
  const ZoneVector<int>* instr_starts;
};

std::ostream& operator<<(std::ostream& out, const InstructionStartsAsJSON& s) {
  out << ", \"instructionOffsetToPCOffset\": {";
  bool need_comma = false;
  for (size_t i = 0; i < s.instr_starts->size(); ++i) {
    if (need_comma) out << ", ";
    int offset = (*s.instr_starts)[i];
    out << "\"" << i << "\":" << offset;
    need_comma = true;
  }
  out << "}";
  return out;
}
// Runs the code generator over the instruction sequence and records the
// instruction start offsets for tracing.
void PipelineImpl::AssembleCode(Linkage* linkage) {
  PipelineData* data = this->data_;
  data->BeginPhaseKind("code generation");
  data->InitializeCodeGenerator(linkage);

  Run<AssembleCodePhase>();
  if (data->info()->trace_turbo_json_enabled()) {
    TurboJsonFile json_of(data->info(), std::ios_base::app);
    json_of << "{\"name\":\"code generation\""
            << ", \"type\":\"instructions\""
            << InstructionStartsAsJSON{&data->code_generator()->instr_starts()};
    json_of << "},\n";
  }
  data->DeleteInstructionZone();
}
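
// Stream helper for the JSON trace output: prints the mapping from basic
// block id to code offset recorded by the code generator.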
struct BlockStartsAsJSON {
  const ZoneVector<int>* block_starts;
};

std::ostream& operator<<(std::ostream& out, const BlockStartsAsJSON& s) {
  out << ", \"blockIdToOffset\": {";
  bool need_comma = false;
  for (size_t i = 0; i < s.block_starts->size(); ++i) {
    if (need_comma) out << ", ";
    int offset = (*s.block_starts)[i];
    out << "\"" << i << "\":" << offset;
    need_comma = true;
  }
  out << "},";
  return out;
}
// Finalizes the Code object, feeds the profiler and the tracing output, and
// returns the code handle (or nothing if code generation failed).
MaybeHandle<Code> PipelineImpl::FinalizeCode() {
  PipelineData* data = this->data_;
  if (data->broker()) {
    data->broker()->Retire();
  }
  Run<FinalizeCodePhase>();

  MaybeHandle<Code> maybe_code = data->code();
  Handle<Code> code;
  if (!maybe_code.ToHandle(&code)) {
    return maybe_code;
  }

  if (data->profiler_data()) {
#ifdef ENABLE_DISASSEMBLER
    std::ostringstream os;
    code->Disassemble(nullptr, os);
    data->profiler_data()->SetCode(&os);
#endif  // ENABLE_DISASSEMBLER
  }

  info()->SetCode(code);
  PrintCode(isolate(), code, info());

  if (info()->trace_turbo_json_enabled()) {
    TurboJsonFile json_of(info(), std::ios_base::app);

    json_of << "{\"name\":\"disassembly\",\"type\":\"disassembly\""
            << BlockStartsAsJSON{&data->code_generator()->block_starts()}
            << "\"data\":\"";
#ifdef ENABLE_DISASSEMBLER
    std::stringstream disassembly_stream;
    code->Disassemble(nullptr, disassembly_stream);
    std::string disassembly_string(disassembly_stream.str());
    for (const auto& c : disassembly_string) {
      json_of << AsEscapedUC16ForJSON(c);
    }
#endif  // ENABLE_DISASSEMBLER
    json_of << "\"}\n],\n";
    json_of << "\"nodePositions\":";
    json_of << data->source_position_output() << ",\n";
    JsonPrintAllSourceWithPositions(json_of, data->info(), isolate());
    json_of << "\n}";
  }

  if (info()->trace_turbo_json_enabled() ||
      info()->trace_turbo_graph_enabled()) {
    CodeTracer::Scope tracing_scope(data->GetCodeTracer());
    OFStream os(tracing_scope.file());
    os << "---------------------------------------------------\n"
       << "Finished compiling method " << info()->GetDebugName().get()
       << " using Turbofan" << std::endl;
  }
  return code;
}
MaybeHandle<Code> PipelineImpl::GenerateCode(CallDescriptor* call_descriptor) {
  Linkage linkage(call_descriptor);

  if (!SelectInstructions(&linkage)) return MaybeHandle<Code>();

  AssembleCode(&linkage);
  return FinalizeCode();
}
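
// Commits the compilation dependencies recorded for this code object; returns
// false when they can no longer be committed, in which case callers discard
// the generated code.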
bool PipelineImpl::CommitDependencies(Handle<Code> code) {
  return data_->dependencies() == nullptr ||
         data_->dependencies()->Commit(code);
}
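
// Dumps the current instruction sequence, either into the JSON trace file or
// as plain text, depending on which tracing mode is enabled.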
void TraceSequence(OptimizedCompilationInfo* info, PipelineData* data,
                   const char* phase_name) {
  if (info->trace_turbo_json_enabled()) {
    AllowHandleDereference allow_deref;
    TurboJsonFile json_of(info, std::ios_base::app);
    json_of << "{\"name\":\"" << phase_name << "\",\"type\":\"sequence\",";
    json_of << InstructionSequenceAsJSON{data->sequence()};
    json_of << "},\n";
  }
  if (info->trace_turbo_graph_enabled()) {
    AllowHandleDereference allow_deref;
    CodeTracer::Scope tracing_scope(data->GetCodeTracer());
    OFStream os(tracing_scope.file());
    os << "----- Instruction sequence " << phase_name << " -----\n"
       << *data->sequence();
  }
}
// Register allocation proper: builds live ranges, optionally splinters and
// merges them, runs the linear-scan allocators for general and FP registers,
// assigns spill slots, and (when run_verifier is set) checks the assignment.
void PipelineImpl::AllocateRegisters(const RegisterConfiguration* config,
                                     CallDescriptor* call_descriptor,
                                     bool run_verifier) {
  PipelineData* data = this->data_;
  // Set up the verifier in its own zone when requested.
  std::unique_ptr<Zone> verifier_zone;
  RegisterAllocatorVerifier* verifier = nullptr;
  if (run_verifier) {
    verifier_zone.reset(new Zone(data->allocator(), ZONE_NAME));
    verifier = new (verifier_zone.get()) RegisterAllocatorVerifier(
        verifier_zone.get(), config, data->sequence());
  }

  data_->sequence()->ValidateEdgeSplitForm();
  data_->sequence()->ValidateDeferredBlockEntryPaths();
  data_->sequence()->ValidateDeferredBlockExitPaths();

  data->InitializeRegisterAllocationData(config, call_descriptor);
  if (info()->is_osr()) data->osr_helper()->SetupFrame(data->frame());

  Run<MeetRegisterConstraintsPhase>();
  Run<ResolvePhisPhase>();
  Run<BuildLiveRangesPhase>();
  TraceSequence(info(), data, "before register allocation");
  if (verifier != nullptr) {
    CHECK(!data->register_allocation_data()->ExistsUseWithoutDefinition());
    CHECK(data->register_allocation_data()
              ->RangesDefinedInDeferredStayInDeferred());
  }

  if (info()->trace_turbo_json_enabled() && !data->MayHaveUnverifiableGraph()) {
    TurboCfgFile tcf(isolate());
    tcf << AsC1VRegisterAllocationData("PreAllocation",
                                       data->register_allocation_data());
  }

  if (FLAG_turbo_preprocess_ranges) {
    Run<SplinterLiveRangesPhase>();
    if (info()->trace_turbo_json_enabled() &&
        !data->MayHaveUnverifiableGraph()) {
      TurboCfgFile tcf(isolate());
      tcf << AsC1VRegisterAllocationData("PostSplinter",
                                         data->register_allocation_data());
    }
  }

  Run<AllocateGeneralRegistersPhase<LinearScanAllocator>>();

  if (data->sequence()->HasFPVirtualRegisters()) {
    Run<AllocateFPRegistersPhase<LinearScanAllocator>>();
  }

  if (FLAG_turbo_preprocess_ranges) {
    Run<MergeSplintersPhase>();
  }

  Run<AssignSpillSlotsPhase>();

  Run<CommitAssignmentPhase>();

  if (verifier != nullptr) {
    verifier->VerifyAssignment("Immediately after CommitAssignmentPhase.");
  }

  Run<PopulateReferenceMapsPhase>();
  Run<ConnectRangesPhase>();
  Run<ResolveControlFlowPhase>();
  if (FLAG_turbo_move_optimization) {
    Run<OptimizeMovesPhase>();
  }

  Run<LocateSpillSlotsPhase>();

  TraceSequence(info(), data, "after register allocation");

  if (verifier != nullptr) {
    verifier->VerifyAssignment("End of regalloc pipeline.");
    verifier->VerifyGapMoves();
  }

  if (info()->trace_turbo_json_enabled() && !data->MayHaveUnverifiableGraph()) {
    TurboCfgFile tcf(isolate());
    tcf << AsC1VRegisterAllocationData("CodeGen",
                                       data->register_allocation_data());
  }

  data->DeleteRegisterAllocationZone();
}
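
// Trivial accessors, forwarding to the PipelineData owned by this pipeline.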
OptimizedCompilationInfo* PipelineImpl::info() const { return data_->info(); }

Isolate* PipelineImpl::isolate() const { return data_->isolate(); }

CodeGenerator* PipelineImpl::code_generator() const {
  return data_->code_generator();