V8 API Reference, 7.2.502.16 (for Deno 0.2.4)
runtime-compiler.cc
// Copyright 2014 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/arguments-inl.h"
#include "src/asmjs/asm-js.h"
#include "src/compiler-dispatcher/optimizing-compile-dispatcher.h"
#include "src/compiler.h"
#include "src/deoptimizer.h"
#include "src/frames-inl.h"
#include "src/isolate-inl.h"
#include "src/message-template.h"
#include "src/objects/js-array-buffer-inl.h"
#include "src/objects/js-array-inl.h"
#include "src/runtime/runtime-utils.h"
#include "src/v8threads.h"
#include "src/vm-state-inl.h"

namespace v8 {
namespace internal {

RUNTIME_FUNCTION(Runtime_CompileLazy) {
  HandleScope scope(isolate);
  DCHECK_EQ(1, args.length());
  CONVERT_ARG_HANDLE_CHECKED(JSFunction, function, 0);

#ifdef DEBUG
  if (FLAG_trace_lazy && !function->shared()->is_compiled()) {
    PrintF("[unoptimized: ");
    function->PrintName();
    PrintF("]\n");
  }
#endif

  StackLimitCheck check(isolate);
  if (check.JsHasOverflowed(kStackSpaceRequiredForCompilation * KB)) {
    return isolate->StackOverflow();
  }
  if (!Compiler::Compile(function, Compiler::KEEP_EXCEPTION)) {
    return ReadOnlyRoots(isolate).exception();
  }
  DCHECK(function->is_compiled());
  return function->code();
}
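Runtime_CompileLazy is reached through the Builtins::kCompileLazy trampoline that sits in an uncompiled function's code slot: the first call traps into the runtime entry above, which compiles the function and returns the code to run. A minimal embedder-side sketch of that trigger, assuming an already-initialized v8::Isolate* isolate and an entered v8::Local<v8::Context> context (illustrative only, not part of this file):

// "inner" is parsed but left uncompiled when the script is compiled; its code
// slot holds the CompileLazy builtin until the first call below.
v8::Local<v8::String> src =
    v8::String::NewFromUtf8(isolate,
                            "function inner() { return 42; } inner();",
                            v8::NewStringType::kNormal)
        .ToLocalChecked();
v8::Local<v8::Script> script =
    v8::Script::Compile(context, src).ToLocalChecked();
// Running the script performs the first call to inner(), which lands in
// Runtime_CompileLazy and compiles the function's bytecode on demand.
script->Run(context).ToLocalChecked();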

RUNTIME_FUNCTION(Runtime_CompileOptimized_Concurrent) {
  HandleScope scope(isolate);
  DCHECK_EQ(1, args.length());
  CONVERT_ARG_HANDLE_CHECKED(JSFunction, function, 0);
  StackLimitCheck check(isolate);
  if (check.JsHasOverflowed(kStackSpaceRequiredForCompilation * KB)) {
    return isolate->StackOverflow();
  }
  if (!Compiler::CompileOptimized(function, ConcurrencyMode::kConcurrent)) {
    return ReadOnlyRoots(isolate).exception();
  }
  DCHECK(function->is_compiled());
  return function->code();
}

RUNTIME_FUNCTION(Runtime_FunctionFirstExecution) {
  HandleScope scope(isolate);
  StackLimitCheck check(isolate);
  DCHECK_EQ(1, args.length());

  CONVERT_ARG_HANDLE_CHECKED(JSFunction, function, 0);
  DCHECK_EQ(function->feedback_vector()->optimization_marker(),
            OptimizationMarker::kLogFirstExecution);
  DCHECK(FLAG_log_function_events);
  Handle<SharedFunctionInfo> sfi(function->shared(), isolate);
  LOG(isolate, FunctionEvent(
                   "first-execution", Script::cast(sfi->script())->id(), 0,
                   sfi->StartPosition(), sfi->EndPosition(), sfi->DebugName()));
  function->feedback_vector()->ClearOptimizationMarker();
  // Return the code to continue execution; we don't care at this point whether
  // this is for lazy compilation or has been eagerly compiled.
  return function->code();
}

RUNTIME_FUNCTION(Runtime_CompileOptimized_NotConcurrent) {
  HandleScope scope(isolate);
  DCHECK_EQ(1, args.length());
  CONVERT_ARG_HANDLE_CHECKED(JSFunction, function, 0);
  StackLimitCheck check(isolate);
  if (check.JsHasOverflowed(kStackSpaceRequiredForCompilation * KB)) {
    return isolate->StackOverflow();
  }
  if (!Compiler::CompileOptimized(function, ConcurrencyMode::kNotConcurrent)) {
    return ReadOnlyRoots(isolate).exception();
  }
  DCHECK(function->is_compiled());
  return function->code();
}
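Runtime_CompileOptimized_Concurrent and Runtime_CompileOptimized_NotConcurrent differ only in the ConcurrencyMode they pass to Compiler::CompileOptimized: kConcurrent queues the job on the optimizing-compile dispatcher and lets execution continue in the meantime, while kNotConcurrent produces optimized code before returning. A sketch of the shared shape as one hypothetical helper (not something this file defines):

Object* CompileOptimizedImpl(Isolate* isolate, Handle<JSFunction> function,
                             ConcurrencyMode mode) {
  // Guard against compiling when the JS stack is nearly exhausted, exactly as
  // the two runtime entries above do.
  StackLimitCheck check(isolate);
  if (check.JsHasOverflowed(kStackSpaceRequiredForCompilation * KB)) {
    return isolate->StackOverflow();
  }
  // On failure an exception is already pending; propagate the marker.
  if (!Compiler::CompileOptimized(function, mode)) {
    return ReadOnlyRoots(isolate).exception();
  }
  DCHECK(function->is_compiled());
  return function->code();
}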

RUNTIME_FUNCTION(Runtime_EvictOptimizedCodeSlot) {
  SealHandleScope scope(isolate);
  DCHECK_EQ(1, args.length());
  CONVERT_ARG_HANDLE_CHECKED(JSFunction, function, 0);

  DCHECK(function->shared()->is_compiled());

  function->feedback_vector()->EvictOptimizedCodeMarkedForDeoptimization(
      function->shared(), "Runtime_EvictOptimizedCodeSlot");
  return function->code();
}

RUNTIME_FUNCTION(Runtime_InstantiateAsmJs) {
  HandleScope scope(isolate);
  DCHECK_EQ(args.length(), 4);
  CONVERT_ARG_HANDLE_CHECKED(JSFunction, function, 0);

  Handle<JSReceiver> stdlib;
  if (args[1]->IsJSReceiver()) {
    stdlib = args.at<JSReceiver>(1);
  }
  Handle<JSReceiver> foreign;
  if (args[2]->IsJSReceiver()) {
    foreign = args.at<JSReceiver>(2);
  }
  Handle<JSArrayBuffer> memory;
  if (args[3]->IsJSArrayBuffer()) {
    memory = args.at<JSArrayBuffer>(3);
  }
  if (function->shared()->HasAsmWasmData()) {
    Handle<SharedFunctionInfo> shared(function->shared(), isolate);
    Handle<AsmWasmData> data(shared->asm_wasm_data(), isolate);
    MaybeHandle<Object> result = AsmJs::InstantiateAsmWasm(
        isolate, shared, data, stdlib, foreign, memory);
    if (!result.is_null()) {
      return *result.ToHandleChecked();
    }
  }
  // Remove wasm data, mark as broken for asm->wasm, replace function code with
  // UncompiledData, and return a smi 0 to indicate failure.
  if (function->shared()->HasAsmWasmData()) {
    SharedFunctionInfo::DiscardCompiled(isolate,
                                        handle(function->shared(), isolate));
  }
  function->shared()->set_is_asm_wasm_broken(true);
  DCHECK(function->code() ==
         isolate->builtins()->builtin(Builtins::kInstantiateAsmJs));
  function->set_code(isolate->builtins()->builtin(Builtins::kCompileLazy));
  return Smi::kZero;
}
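The three optional arguments mirror the parameters of an asm.js module function, function Module(stdlib, foreign, heap): a standard-library object, a foreign-function object, and a heap ArrayBuffer. On success the instantiated module object is returned; on any failure the code above permanently demotes the function to the regular JavaScript pipeline. That demotion, factored into a hypothetical helper purely for illustration:

void DemoteBrokenAsmJs(Isolate* isolate, Handle<JSFunction> function) {
  // Drop the cached asm->wasm translation, if any, and remember that this
  // function must never be treated as valid asm.js again.
  if (function->shared()->HasAsmWasmData()) {
    SharedFunctionInfo::DiscardCompiled(isolate,
                                        handle(function->shared(), isolate));
  }
  function->shared()->set_is_asm_wasm_broken(true);
  // Route the next call through ordinary lazy compilation of the JS source;
  // the runtime entry then signals failure to its caller by returning Smi 0.
  function->set_code(isolate->builtins()->builtin(Builtins::kCompileLazy));
}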

RUNTIME_FUNCTION(Runtime_NotifyDeoptimized) {
  HandleScope scope(isolate);
  DCHECK_EQ(0, args.length());
  Deoptimizer* deoptimizer = Deoptimizer::Grab(isolate);
  DCHECK(deoptimizer->compiled_code()->kind() == Code::OPTIMIZED_FUNCTION);
  DCHECK(deoptimizer->compiled_code()->is_turbofanned());
  DCHECK(AllowHeapAllocation::IsAllowed());
  DCHECK(isolate->context().is_null());

  TimerEventScope<TimerEventDeoptimizeCode> timer(isolate);
  TRACE_EVENT0("v8", "V8.DeoptimizeCode");
  Handle<JSFunction> function = deoptimizer->function();
  DeoptimizeKind type = deoptimizer->deopt_kind();

  // TODO(turbofan): We currently need the native context to materialize
  // the arguments object, but only to get to its map.
  isolate->set_context(deoptimizer->function()->native_context());

  // Make sure to materialize objects before causing any allocation.
  deoptimizer->MaterializeHeapObjects();
  delete deoptimizer;

  // Ensure the context register is updated for materialized objects.
  JavaScriptFrameIterator top_it(isolate);
  JavaScriptFrame* top_frame = top_it.frame();
  isolate->set_context(Context::cast(top_frame->context()));

  // Invalidate the underlying optimized code on non-lazy deopts.
  if (type != DeoptimizeKind::kLazy) {
    Deoptimizer::DeoptimizeFunction(*function);
  }

  return ReadOnlyRoots(isolate).undefined_value();
}


static bool IsSuitableForOnStackReplacement(Isolate* isolate,
                                            Handle<JSFunction> function) {
  // Don't OSR a function whose optimization has been disabled.
  if (function->shared()->optimization_disabled()) return false;
  // If we are trying to do OSR when there are already optimized
  // activations of the function, it means (a) the function is directly or
  // indirectly recursive and (b) an optimized invocation has been
  // deoptimized so that we are currently in an unoptimized activation.
  // Check for optimized activations of this function.
  for (JavaScriptFrameIterator it(isolate); !it.done(); it.Advance()) {
    JavaScriptFrame* frame = it.frame();
    if (frame->is_optimized() && frame->function() == *function) return false;
  }

  return true;
}

namespace {

BailoutId DetermineEntryAndDisarmOSRForInterpreter(JavaScriptFrame* frame) {
  InterpretedFrame* iframe = reinterpret_cast<InterpretedFrame*>(frame);

  // Note that the bytecode array active on the stack might be different from
  // the one installed on the function (e.g. patched by debugger). This however
  // is fine because we guarantee the layout to be in sync, hence any BailoutId
  // representing the entry point will be valid for any copy of the bytecode.
  Handle<BytecodeArray> bytecode(iframe->GetBytecodeArray(), iframe->isolate());

  DCHECK(frame->LookupCode()->is_interpreter_trampoline_builtin());
  DCHECK(frame->function()->shared()->HasBytecodeArray());
  DCHECK(frame->is_interpreted());

  // Reset the OSR loop nesting depth to disarm back edges.
  bytecode->set_osr_loop_nesting_level(0);

  // Return a BailoutId representing the bytecode offset of the back branch.
  return BailoutId(iframe->GetBytecodeOffset());
}

}  // namespace
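DetermineEntryAndDisarmOSRForInterpreter disarms OSR by resetting the bytecode's osr_loop_nesting_level to zero; the arming side lives outside this file, in V8's runtime profiler. A rough sketch of arming, hedged to use only the accessor visible above (the helper name and the exact trigger policy are assumptions, not taken from this file):

void ArmBackEdgesForOsr(Handle<BytecodeArray> bytecode, int level) {
  // The interpreter's loop back edge compares a loop's static nesting depth
  // against this stored level to decide whether to fire an OSR request, so a
  // non-zero level arms some subset of the loops and zero (as set above)
  // disarms them all.
  bytecode->set_osr_loop_nesting_level(level);
}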

RUNTIME_FUNCTION(Runtime_CompileForOnStackReplacement) {
  HandleScope scope(isolate);
  DCHECK_EQ(1, args.length());
  CONVERT_ARG_HANDLE_CHECKED(JSFunction, function, 0);

  // Only reachable when OSR is enabled.
  CHECK(FLAG_use_osr);

  // Determine the frame that triggered the OSR request.
  JavaScriptFrameIterator it(isolate);
  JavaScriptFrame* frame = it.frame();
  DCHECK_EQ(frame->function(), *function);
  DCHECK(frame->is_interpreted());

  // Determine the entry point for which this OSR request has been fired and
  // also disarm all back edges in the calling code to stop new requests.
  BailoutId ast_id = DetermineEntryAndDisarmOSRForInterpreter(frame);
  DCHECK(!ast_id.IsNone());

  MaybeHandle<Code> maybe_result;
  if (IsSuitableForOnStackReplacement(isolate, function)) {
    if (FLAG_trace_osr) {
      PrintF("[OSR - Compiling: ");
      function->PrintName();
      PrintF(" at AST id %d]\n", ast_id.ToInt());
    }
    maybe_result = Compiler::GetOptimizedCodeForOSR(function, ast_id, frame);
  }

  // Check whether we ended up with usable optimized code.
  Handle<Code> result;
  if (maybe_result.ToHandle(&result) &&
      result->kind() == Code::OPTIMIZED_FUNCTION) {
    DeoptimizationData data =
        DeoptimizationData::cast(result->deoptimization_data());

    if (data->OsrPcOffset()->value() >= 0) {
      DCHECK(BailoutId(data->OsrBytecodeOffset()->value()) == ast_id);
      if (FLAG_trace_osr) {
        PrintF("[OSR - Entry at AST id %d, offset %d in optimized code]\n",
               ast_id.ToInt(), data->OsrPcOffset()->value());
      }

      DCHECK(result->is_turbofanned());
      if (!function->HasOptimizedCode()) {
        // If we're not already optimized, set to optimize non-concurrently on
        // the next call; otherwise we'd run unoptimized once more and
        // potentially compile for OSR again.
        if (FLAG_trace_osr) {
          PrintF("[OSR - Re-marking ");
          function->PrintName();
          PrintF(" for non-concurrent optimization]\n");
        }
        function->SetOptimizationMarker(OptimizationMarker::kCompileOptimized);
      }
      return *result;
    }
  }

  // Failed.
  if (FLAG_trace_osr) {
    PrintF("[OSR - Failed: ");
    function->PrintName();
    PrintF(" at AST id %d]\n", ast_id.ToInt());
  }

  if (!function->IsOptimized()) {
    function->set_code(function->shared()->GetCode());
  }
  return nullptr;
}

static Object* CompileGlobalEval(Isolate* isolate, Handle<String> source,
                                 Handle<SharedFunctionInfo> outer_info,
                                 LanguageMode language_mode,
                                 int eval_scope_position, int eval_position) {
  Handle<Context> context(isolate->context(), isolate);
  Handle<Context> native_context(context->native_context(), isolate);

  // Check if the native context allows code generation from
  // strings. Throw an exception if it doesn't.
  if (native_context->allow_code_gen_from_strings()->IsFalse(isolate) &&
      !Compiler::CodeGenerationFromStringsAllowed(isolate, native_context,
                                                  source)) {
    Handle<Object> error_message =
        native_context->ErrorMessageForCodeGenerationFromStrings();
    Handle<Object> error;
    MaybeHandle<Object> maybe_error = isolate->factory()->NewEvalError(
        MessageTemplate::kCodeGenFromStrings, error_message);
    if (maybe_error.ToHandle(&error)) isolate->Throw(*error);
    return ReadOnlyRoots(isolate).exception();
  }

  // Deal with a normal eval call with a string argument. Compile it
  // and return the compiled function bound in the local context.
  static const ParseRestriction restriction = NO_PARSE_RESTRICTION;
  Handle<JSFunction> compiled;
  ASSIGN_RETURN_ON_EXCEPTION_VALUE(
      isolate, compiled,
      Compiler::GetFunctionFromEval(source, outer_info, context, language_mode,
                                    restriction, kNoSourcePosition,
                                    eval_scope_position, eval_position),
      ReadOnlyRoots(isolate).exception());
  return *compiled;
}
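CompileGlobalEval only proceeds when the native context permits code generation from strings, or when the embedder's callback explicitly allows the source; otherwise it throws an EvalError built from MessageTemplate::kCodeGenFromStrings. A hedged embedder-side sketch of configuring that policy through the public v8.h API of this era (isolate and context are assumed to be a live v8::Isolate* and v8::Local<v8::Context>; not part of this file):

// Forbid eval() and new Function() in this context by default.
context->AllowCodeGenerationFromStrings(false);

// Optionally install an isolate-wide callback that can still approve
// individual sources; the Compiler::CodeGenerationFromStringsAllowed check
// above is what ultimately consults this decision.
isolate->SetAllowCodeGenerationFromStringsCallback(
    [](v8::Local<v8::Context> context, v8::Local<v8::String> source) {
      return false;  // reject everything; return true to allow a source
    });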


RUNTIME_FUNCTION(Runtime_ResolvePossiblyDirectEval) {
  HandleScope scope(isolate);
  DCHECK_EQ(6, args.length());

  Handle<Object> callee = args.at(0);

  // If "eval" didn't refer to the original GlobalEval, it's not a
  // direct call to eval.
  // (And even if it is, but the first argument isn't a string, just let
  // execution default to an indirect call to eval, which will also return
  // the first argument without doing anything).
  if (*callee != isolate->native_context()->global_eval_fun() ||
      !args[1]->IsString()) {
    return *callee;
  }

  DCHECK(args[3]->IsSmi());
  DCHECK(is_valid_language_mode(args.smi_at(3)));
  LanguageMode language_mode = static_cast<LanguageMode>(args.smi_at(3));
  DCHECK(args[4]->IsSmi());
  Handle<SharedFunctionInfo> outer_info(args.at<JSFunction>(2)->shared(),
                                        isolate);
  return CompileGlobalEval(isolate, args.at<String>(1), outer_info,
                           language_mode, args.smi_at(4), args.smi_at(5));
}
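Runtime_ResolvePossiblyDirectEval treats a call as direct eval only when the callee is still the context's original global eval function and the first argument is a string; everything else simply returns the callee so the call proceeds as an ordinary (indirect) call. The test, factored out as a hypothetical helper for clarity:

bool IsDirectEvalCall(Isolate* isolate, Handle<Object> callee,
                      Handle<Object> first_arg) {
  // Anything other than the unmodified global eval, or a non-string first
  // argument, bypasses CompileGlobalEval and falls through to a normal call.
  return *callee == isolate->native_context()->global_eval_fun() &&
         first_arg->IsString();
}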
}  // namespace internal
}  // namespace v8