#ifndef V8_OPTIMIZED_COMPILATION_INFO_H_
#define V8_OPTIMIZED_COMPILATION_INFO_H_

#include <memory>
#include <vector>

#include "src/bailout-reason.h"
#include "src/frames.h"
#include "src/globals.h"
#include "src/handles.h"
#include "src/objects.h"
#include "src/source-position-table.h"
#include "src/utils.h"
#include "src/vector.h"

namespace v8 {
namespace internal {

class DeferredHandles;
class FunctionLiteral;
class Isolate;
class JavaScriptFrame;
class Zone;

// OptimizedCompilationInfo encapsulates the information needed to compile
// optimized code for a given function, and the results of the optimized
// compilation.
class V8_EXPORT_PRIVATE OptimizedCompilationInfo final {
 public:
  // Various configuration flags for a compilation, as well as some properties
  // of the compiled code produced by a compilation.
  enum Flag {
    kAccessorInliningEnabled = 1 << 0,
    kFunctionContextSpecializing = 1 << 1,
    kInliningEnabled = 1 << 2,
    kDisableFutureOptimization = 1 << 3,
    kSplittingEnabled = 1 << 4,
    kSourcePositionsEnabled = 1 << 5,
    kBailoutOnUninitialized = 1 << 6,
    kLoopPeelingEnabled = 1 << 7,
    kUntrustedCodeMitigations = 1 << 8,
    kSwitchJumpTableEnabled = 1 << 9,
    kCalledWithCodeStartRegister = 1 << 10,
    kPoisonRegisterArguments = 1 << 11,
    kAllocationFoldingEnabled = 1 << 12,
    kAnalyzeEnvironmentLiveness = 1 << 13,
    kTraceTurboJson = 1 << 14,
    kTraceTurboGraph = 1 << 15,
    kTraceTurboScheduled = 1 << 16,
    kWasmRuntimeExceptionSupport = 1 << 17
  };

  // Construct a compilation info for optimized compilation.
  OptimizedCompilationInfo(Zone* zone, Isolate* isolate,
                           Handle<SharedFunctionInfo> shared,
                           Handle<JSFunction> closure);
  // Construct a compilation info for stub compilation, Wasm, and testing.
  OptimizedCompilationInfo(Vector<const char> debug_name, Zone* zone,
                           Code::Kind code_kind);

  ~OptimizedCompilationInfo();

  Zone* zone() { return zone_; }
  bool is_osr() const { return !osr_offset_.IsNone(); }
  Handle<SharedFunctionInfo> shared_info() const { return shared_info_; }
  bool has_shared_info() const { return !shared_info().is_null(); }
  bool has_bytecode_array() const { return !bytecode_array_.is_null(); }
  Code::Kind code_kind() const { return code_kind_; }
  uint32_t stub_key() const { return stub_key_; }
  void set_stub_key(uint32_t stub_key) { stub_key_ = stub_key; }
  int32_t builtin_index() const { return builtin_index_; }
  void set_builtin_index(int32_t index) { builtin_index_ = index; }
  BailoutId osr_offset() const { return osr_offset_; }
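
  // Flags used by optimized compilation.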
  void MarkAsFunctionContextSpecializing() {
    SetFlag(kFunctionContextSpecializing);
  }
  bool is_function_context_specializing() const {
    return GetFlag(kFunctionContextSpecializing);
  }

  void MarkAsAccessorInliningEnabled() { SetFlag(kAccessorInliningEnabled); }
  bool is_accessor_inlining_enabled() const {
    return GetFlag(kAccessorInliningEnabled);
  }

  void MarkAsSourcePositionsEnabled() { SetFlag(kSourcePositionsEnabled); }
  bool is_source_positions_enabled() const {
    return GetFlag(kSourcePositionsEnabled);
  }

  void MarkAsInliningEnabled() { SetFlag(kInliningEnabled); }
  bool is_inlining_enabled() const { return GetFlag(kInliningEnabled); }

  void SetPoisoningMitigationLevel(PoisoningMitigationLevel poisoning_level) {
    poisoning_level_ = poisoning_level;
  }
  PoisoningMitigationLevel GetPoisoningMitigationLevel() const {
    return poisoning_level_;
  }

  void MarkAsSplittingEnabled() { SetFlag(kSplittingEnabled); }
  bool is_splitting_enabled() const { return GetFlag(kSplittingEnabled); }

  void MarkAsBailoutOnUninitialized() { SetFlag(kBailoutOnUninitialized); }
  bool is_bailout_on_uninitialized() const {
    return GetFlag(kBailoutOnUninitialized);
  }

  void MarkAsLoopPeelingEnabled() { SetFlag(kLoopPeelingEnabled); }
  bool is_loop_peeling_enabled() const { return GetFlag(kLoopPeelingEnabled); }

  bool has_untrusted_code_mitigations() const {
    return GetFlag(kUntrustedCodeMitigations);
  }

  bool switch_jump_table_enabled() const {
    return GetFlag(kSwitchJumpTableEnabled);
  }

  bool called_with_code_start_register() const {
    bool enabled = GetFlag(kCalledWithCodeStartRegister);
    return enabled;
  }

  void MarkAsPoisoningRegisterArguments() {
    DCHECK(has_untrusted_code_mitigations());
    SetFlag(kPoisonRegisterArguments);
  }
  bool is_poisoning_register_arguments() const {
    bool enabled = GetFlag(kPoisonRegisterArguments);
    DCHECK_IMPLIES(enabled, has_untrusted_code_mitigations());
    DCHECK_IMPLIES(enabled, called_with_code_start_register());
    return enabled;
  }

  void MarkAsAllocationFoldingEnabled() { SetFlag(kAllocationFoldingEnabled); }
  bool is_allocation_folding_enabled() const {
    return GetFlag(kAllocationFoldingEnabled);
  }

  void MarkAsAnalyzeEnvironmentLiveness() {
    SetFlag(kAnalyzeEnvironmentLiveness);
  }
  bool is_analyze_environment_liveness() const {
    return GetFlag(kAnalyzeEnvironmentLiveness);
  }

  void SetWasmRuntimeExceptionSupport() {
    SetFlag(kWasmRuntimeExceptionSupport);
  }

  bool wasm_runtime_exception_support() {
    return GetFlag(kWasmRuntimeExceptionSupport);
  }

  bool trace_turbo_json_enabled() const { return GetFlag(kTraceTurboJson); }

  bool trace_turbo_graph_enabled() const { return GetFlag(kTraceTurboGraph); }

  bool trace_turbo_scheduled_enabled() const {
    return GetFlag(kTraceTurboScheduled);
  }
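
  // Accessors for the compiled function's context, native context and global
  // object, declared here and defined out-of-line. They are only meaningful
  // for compilations that have a JSFunction closure attached.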
  bool has_context() const;

  bool has_native_context() const;
  Context native_context() const;

  bool has_global_object() const;
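
  // Accessors for the different compilation modes.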
  bool IsOptimizing() const { return code_kind() == Code::OPTIMIZED_FUNCTION; }
  bool IsWasm() const { return code_kind() == Code::WASM_FUNCTION; }
  bool IsStub() const {
    return code_kind() != Code::OPTIMIZED_FUNCTION &&
           code_kind() != Code::WASM_FUNCTION;
  }
  void SetOptimizingForOsr(BailoutId osr_offset, JavaScriptFrame* osr_frame) {
    DCHECK(IsOptimizing());
    osr_offset_ = osr_offset;
    osr_frame_ = osr_frame;
  }

  void set_deferred_handles(std::shared_ptr<DeferredHandles> deferred_handles);
  std::shared_ptr<DeferredHandles> deferred_handles() {
    return deferred_handles_;
  }

  void ReopenHandlesInNewHandleScope(Isolate* isolate);
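
  // AbortOptimization records a bailout reason and disables any future
  // optimization attempt for this function, whereas RetryOptimization only
  // records the reason and leaves future optimization attempts enabled.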

  void AbortOptimization(BailoutReason reason) {
    DCHECK_NE(reason, BailoutReason::kNoReason);
    if (bailout_reason_ == BailoutReason::kNoReason) bailout_reason_ = reason;
    SetFlag(kDisableFutureOptimization);
  }

  void RetryOptimization(BailoutReason reason) {
    DCHECK_NE(reason, BailoutReason::kNoReason);
    if (GetFlag(kDisableFutureOptimization)) return;
    bailout_reason_ = reason;
  }

  BailoutReason bailout_reason() const { return bailout_reason_; }

  int optimization_id() const {
    DCHECK(IsOptimizing());
    return optimization_id_;
  }
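
  // Bookkeeping for each function inlined into this compilation: its
  // SharedFunctionInfo, its bytecode, and the source position of the inlined
  // call site. The inlining id is filled in later via
  // RegisterInlinedFunctionId.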
  struct InlinedFunctionHolder {
    Handle<SharedFunctionInfo> shared_info;
    Handle<BytecodeArray> bytecode_array;

    InliningPosition position;

    InlinedFunctionHolder(Handle<SharedFunctionInfo> inlined_shared_info,
                          Handle<BytecodeArray> inlined_bytecode,
                          SourcePosition pos)
        : shared_info(inlined_shared_info), bytecode_array(inlined_bytecode) {
      position.position = pos;
      // Initialized when generating the deoptimization literals.
      position.inlined_function_id = DeoptimizationData::kNotInlinedIndex;
    }

    void RegisterInlinedFunctionId(size_t inlined_function_id) {
      position.inlined_function_id = static_cast<int>(inlined_function_id);
    }
  };

  typedef std::vector<InlinedFunctionHolder> InlinedFunctionList;
  InlinedFunctionList& inlined_functions() { return inlined_functions_; }

  std::unique_ptr<char[]> GetDebugName() const;

  StackFrame::Type GetOutputStackFrameType() const;

  const char* trace_turbo_filename() const {
    return trace_turbo_filename_.get();
  }

  void set_trace_turbo_filename(std::unique_ptr<char[]> filename) {
    trace_turbo_filename_ = std::move(filename);
  }

 private:
  OptimizedCompilationInfo(Code::Kind code_kind, Zone* zone);

  void ConfigureFlags();

  void SetFlag(Flag flag) { flags_ |= flag; }
  bool GetFlag(Flag flag) const { return (flags_ & flag) != 0; }

  void SetTracingFlags(bool passes_filter);

  // Compilation flags.
  unsigned flags_ = 0;
  PoisoningMitigationLevel poisoning_level_ =
      PoisoningMitigationLevel::kDontPoison;

  Code::Kind code_kind_;
  uint32_t stub_key_ = 0;
  int32_t builtin_index_ = -1;

  Handle<BytecodeArray> bytecode_array_;

  Handle<SharedFunctionInfo> shared_info_;

  Handle<JSFunction> closure_;
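
  // Entry point when compiling for OSR, {BailoutId::None} otherwise.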
  BailoutId osr_offset_ = BailoutId::None();

  // The zone from which the compilation pipeline working on this
  // OptimizedCompilationInfo allocates.
  Zone* zone_;

  std::shared_ptr<DeferredHandles> deferred_handles_;

  BailoutReason bailout_reason_ = BailoutReason::kNoReason;

  InlinedFunctionList inlined_functions_;

  int optimization_id_ = -1;
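
  // The current OSR frame for specialization, or {nullptr}.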
  JavaScriptFrame* osr_frame_ = nullptr;

  Vector<const char> debug_name_;
  std::unique_ptr<char[]> trace_turbo_filename_;

  DISALLOW_COPY_AND_ASSIGN(OptimizedCompilationInfo);
};

}  // namespace internal
}  // namespace v8

#endif  // V8_OPTIMIZED_COMPILATION_INFO_H_