// bytecode-generator.cc — excerpt from V8 7.2.502.16 (as bundled with Deno 0.2.4).
1 // Copyright 2015 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
4 
5 #include "src/interpreter/bytecode-generator.h"
6 
7 #include "src/api-inl.h"
8 #include "src/ast/ast-source-ranges.h"
9 #include "src/ast/scopes.h"
10 #include "src/builtins/builtins-constructor.h"
11 #include "src/code-stubs.h"
12 #include "src/compiler.h"
13 #include "src/interpreter/bytecode-flags.h"
14 #include "src/interpreter/bytecode-jump-table.h"
15 #include "src/interpreter/bytecode-label.h"
16 #include "src/interpreter/bytecode-register-allocator.h"
17 #include "src/interpreter/control-flow-builders.h"
18 #include "src/objects-inl.h"
19 #include "src/objects/debug-objects.h"
20 #include "src/objects/literal-objects-inl.h"
21 #include "src/objects/smi.h"
22 #include "src/parsing/parse-info.h"
23 #include "src/parsing/token.h"
24 #include "src/unoptimized-compilation-info.h"
25 
26 namespace v8 {
27 namespace internal {
28 namespace interpreter {
29 
30 // Scoped class tracking context objects created by the visitor. Represents
31 // mutations of the context chain within the function body, allowing pushing and
32 // popping of the current {context_register} during visitation.
34  public:
35  ContextScope(BytecodeGenerator* generator, Scope* scope)
36  : generator_(generator),
37  scope_(scope),
38  outer_(generator_->execution_context()),
39  register_(Register::current_context()),
40  depth_(0) {
41  DCHECK(scope->NeedsContext() || outer_ == nullptr);
42  if (outer_) {
43  depth_ = outer_->depth_ + 1;
44 
45  // Push the outer context into a new context register.
46  Register outer_context_reg =
47  generator_->register_allocator()->NewRegister();
48  outer_->set_register(outer_context_reg);
49  generator_->builder()->PushContext(outer_context_reg);
50  }
51  generator_->set_execution_context(this);
52  }
53 
54  ~ContextScope() {
55  if (outer_) {
56  DCHECK_EQ(register_.index(), Register::current_context().index());
57  generator_->builder()->PopContext(outer_->reg());
58  outer_->set_register(register_);
59  }
60  generator_->set_execution_context(outer_);
61  }
62 
63  // Returns the depth of the given |scope| for the current execution context.
64  int ContextChainDepth(Scope* scope) {
65  return scope_->ContextChainLength(scope);
66  }
67 
68  // Returns the execution context at |depth| in the current context chain if it
69  // is a function local execution context, otherwise returns nullptr.
70  ContextScope* Previous(int depth) {
71  if (depth > depth_) {
72  return nullptr;
73  }
74 
75  ContextScope* previous = this;
76  for (int i = depth; i > 0; --i) {
77  previous = previous->outer_;
78  }
79  return previous;
80  }
81 
82  Register reg() const { return register_; }
83 
84  private:
85  const BytecodeArrayBuilder* builder() const { return generator_->builder(); }
86 
87  void set_register(Register reg) { register_ = reg; }
88 
89  BytecodeGenerator* generator_;
90  Scope* scope_;
91  ContextScope* outer_;
92  Register register_;
93  int depth_;
94 };
95 
96 // Scoped class for tracking control statements entered by the
97 // visitor. The pattern derives AstGraphBuilder::ControlScope.
99  public:
100  explicit ControlScope(BytecodeGenerator* generator)
101  : generator_(generator), outer_(generator->execution_control()),
102  context_(generator->execution_context()) {
103  generator_->set_execution_control(this);
104  }
105  virtual ~ControlScope() { generator_->set_execution_control(outer()); }
106 
107  void Break(Statement* stmt) {
108  PerformCommand(CMD_BREAK, stmt, kNoSourcePosition);
109  }
110  void Continue(Statement* stmt) {
111  PerformCommand(CMD_CONTINUE, stmt, kNoSourcePosition);
112  }
113  void ReturnAccumulator(int source_position = kNoSourcePosition) {
114  PerformCommand(CMD_RETURN, nullptr, source_position);
115  }
116  void AsyncReturnAccumulator(int source_position = kNoSourcePosition) {
117  PerformCommand(CMD_ASYNC_RETURN, nullptr, source_position);
118  }
119 
120  class DeferredCommands;
121 
122  protected:
123  enum Command {
124  CMD_BREAK,
125  CMD_CONTINUE,
126  CMD_RETURN,
127  CMD_ASYNC_RETURN,
128  CMD_RETHROW
129  };
130  static constexpr bool CommandUsesAccumulator(Command command) {
131  return command != CMD_BREAK && command != CMD_CONTINUE;
132  }
133 
134  void PerformCommand(Command command, Statement* statement,
135  int source_position);
136  virtual bool Execute(Command command, Statement* statement,
137  int source_position) = 0;
138 
139  // Helper to pop the context chain to a depth expected by this control scope.
140  // Note that it is the responsibility of each individual {Execute} method to
141  // trigger this when commands are handled and control-flow continues locally.
142  void PopContextToExpectedDepth();
143 
144  BytecodeGenerator* generator() const { return generator_; }
145  ControlScope* outer() const { return outer_; }
146  ContextScope* context() const { return context_; }
147 
148  private:
149  BytecodeGenerator* generator_;
150  ControlScope* outer_;
151  ContextScope* context_;
152 
153  DISALLOW_COPY_AND_ASSIGN(ControlScope);
154 };
155 
156 // Helper class for a try-finally control scope. It can record intercepted
157 // control-flow commands that cause entry into a finally-block, and re-apply
158 // them after again leaving that block. Special tokens are used to identify
159 // paths going through the finally-block to dispatch after leaving the block.
161  public:
162  DeferredCommands(BytecodeGenerator* generator, Register token_register,
163  Register result_register)
164  : generator_(generator),
165  deferred_(generator->zone()),
166  token_register_(token_register),
167  result_register_(result_register),
168  return_token_(-1),
169  async_return_token_(-1),
170  rethrow_token_(-1) {}
171 
172  // One recorded control-flow command.
173  struct Entry {
174  Command command; // The command type being applied on this path.
175  Statement* statement; // The target statement for the command or {nullptr}.
176  int token; // A token identifying this particular path.
177  };
178 
179  // Records a control-flow command while entering the finally-block. This also
180  // generates a new dispatch token that identifies one particular path. This
181  // expects the result to be in the accumulator.
182  void RecordCommand(Command command, Statement* statement) {
183  int token = GetTokenForCommand(command, statement);
184 
185  DCHECK_LT(token, deferred_.size());
186  DCHECK_EQ(deferred_[token].command, command);
187  DCHECK_EQ(deferred_[token].statement, statement);
188  DCHECK_EQ(deferred_[token].token, token);
189 
190  if (CommandUsesAccumulator(command)) {
191  builder()->StoreAccumulatorInRegister(result_register_);
192  }
193  builder()->LoadLiteral(Smi::FromInt(token));
194  builder()->StoreAccumulatorInRegister(token_register_);
195  if (!CommandUsesAccumulator(command)) {
196  // If we're not saving the accumulator in the result register, shove a
197  // harmless value there instead so that it is still considered "killed" in
198  // the liveness analysis. Normally we would LdaUndefined first, but the
199  // Smi token value is just as good, and by reusing it we save a bytecode.
200  builder()->StoreAccumulatorInRegister(result_register_);
201  }
202  }
203 
204  // Records the dispatch token to be used to identify the re-throw path when
205  // the finally-block has been entered through the exception handler. This
206  // expects the exception to be in the accumulator.
207  void RecordHandlerReThrowPath() {
208  // The accumulator contains the exception object.
209  RecordCommand(CMD_RETHROW, nullptr);
210  }
211 
212  // Records the dispatch token to be used to identify the implicit fall-through
213  // path at the end of a try-block into the corresponding finally-block.
214  void RecordFallThroughPath() {
215  builder()->LoadLiteral(Smi::FromInt(-1));
216  builder()->StoreAccumulatorInRegister(token_register_);
217  // Since we're not saving the accumulator in the result register, shove a
218  // harmless value there instead so that it is still considered "killed" in
219  // the liveness analysis. Normally we would LdaUndefined first, but the Smi
220  // token value is just as good, and by reusing it we save a bytecode.
221  builder()->StoreAccumulatorInRegister(result_register_);
222  }
223 
224  // Applies all recorded control-flow commands after the finally-block again.
225  // This generates a dynamic dispatch on the token from the entry point.
226  void ApplyDeferredCommands() {
227  if (deferred_.size() == 0) return;
228 
229  BytecodeLabel fall_through;
230 
231  if (deferred_.size() == 1) {
232  // For a single entry, just jump to the fallthrough if we don't match the
233  // entry token.
234  const Entry& entry = deferred_[0];
235 
236  builder()
237  ->LoadLiteral(Smi::FromInt(entry.token))
238  .CompareReference(token_register_)
239  .JumpIfFalse(ToBooleanMode::kAlreadyBoolean, &fall_through);
240 
241  if (CommandUsesAccumulator(entry.command)) {
242  builder()->LoadAccumulatorWithRegister(result_register_);
243  }
244  execution_control()->PerformCommand(entry.command, entry.statement,
245  kNoSourcePosition);
246  } else {
247  // For multiple entries, build a jump table and switch on the token,
248  // jumping to the fallthrough if none of them match.
249 
250  BytecodeJumpTable* jump_table =
251  builder()->AllocateJumpTable(static_cast<int>(deferred_.size()), 0);
252  builder()
253  ->LoadAccumulatorWithRegister(token_register_)
254  .SwitchOnSmiNoFeedback(jump_table)
255  .Jump(&fall_through);
256  for (const Entry& entry : deferred_) {
257  builder()->Bind(jump_table, entry.token);
258 
259  if (CommandUsesAccumulator(entry.command)) {
260  builder()->LoadAccumulatorWithRegister(result_register_);
261  }
262  execution_control()->PerformCommand(entry.command, entry.statement,
263  kNoSourcePosition);
264  }
265  }
266 
267  builder()->Bind(&fall_through);
268  }
269 
270  BytecodeArrayBuilder* builder() { return generator_->builder(); }
271  ControlScope* execution_control() { return generator_->execution_control(); }
272 
273  private:
274  int GetTokenForCommand(Command command, Statement* statement) {
275  switch (command) {
276  case CMD_RETURN:
277  return GetReturnToken();
278  case CMD_ASYNC_RETURN:
279  return GetAsyncReturnToken();
280  case CMD_RETHROW:
281  return GetRethrowToken();
282  default:
283  // TODO(leszeks): We could also search for entries with the same
284  // command and statement.
285  return GetNewTokenForCommand(command, statement);
286  }
287  }
288 
289  int GetReturnToken() {
290  if (return_token_ == -1) {
291  return_token_ = GetNewTokenForCommand(CMD_RETURN, nullptr);
292  }
293  return return_token_;
294  }
295 
296  int GetAsyncReturnToken() {
297  if (async_return_token_ == -1) {
298  async_return_token_ = GetNewTokenForCommand(CMD_ASYNC_RETURN, nullptr);
299  }
300  return async_return_token_;
301  }
302 
303  int GetRethrowToken() {
304  if (rethrow_token_ == -1) {
305  rethrow_token_ = GetNewTokenForCommand(CMD_RETHROW, nullptr);
306  }
307  return rethrow_token_;
308  }
309 
310  int GetNewTokenForCommand(Command command, Statement* statement) {
311  int token = static_cast<int>(deferred_.size());
312  deferred_.push_back({command, statement, token});
313  return token;
314  }
315 
316  BytecodeGenerator* generator_;
317  ZoneVector<Entry> deferred_;
318  Register token_register_;
319  Register result_register_;
320 
321  // Tokens for commands that don't need a statement.
322  int return_token_;
323  int async_return_token_;
324  int rethrow_token_;
325 };
326 
327 // Scoped class for dealing with control flow reaching the function level.
330  public:
331  explicit ControlScopeForTopLevel(BytecodeGenerator* generator)
332  : ControlScope(generator) {}
333 
334  protected:
335  bool Execute(Command command, Statement* statement,
336  int source_position) override {
337  switch (command) {
338  case CMD_BREAK: // We should never see break/continue in top-level.
339  case CMD_CONTINUE:
340  UNREACHABLE();
341  case CMD_RETURN:
342  // No need to pop contexts, execution leaves the method body.
343  generator()->BuildReturn(source_position);
344  return true;
345  case CMD_ASYNC_RETURN:
346  // No need to pop contexts, execution leaves the method body.
347  generator()->BuildAsyncReturn(source_position);
348  return true;
349  case CMD_RETHROW:
350  // No need to pop contexts, execution leaves the method body.
351  generator()->BuildReThrow();
352  return true;
353  }
354  return false;
355  }
356 };
357 
358 // Scoped class for enabling break inside blocks and switch blocks.
361  public:
363  BreakableStatement* statement,
364  BreakableControlFlowBuilder* control_builder)
365  : ControlScope(generator),
366  statement_(statement),
367  control_builder_(control_builder) {}
368 
369  protected:
370  bool Execute(Command command, Statement* statement,
371  int source_position) override {
372  control_builder_->set_needs_continuation_counter();
373  if (statement != statement_) return false;
374  switch (command) {
375  case CMD_BREAK:
376  PopContextToExpectedDepth();
377  control_builder_->Break();
378  return true;
379  case CMD_CONTINUE:
380  case CMD_RETURN:
381  case CMD_ASYNC_RETURN:
382  case CMD_RETHROW:
383  break;
384  }
385  return false;
386  }
387 
388  private:
389  Statement* statement_;
390  BreakableControlFlowBuilder* control_builder_;
391 };
392 
393 // Scoped class for enabling 'break' and 'continue' in iteration
394 // constructs, e.g. do...while, while..., for...
397  public:
399  IterationStatement* statement,
400  LoopBuilder* loop_builder)
401  : ControlScope(generator),
402  statement_(statement),
403  loop_builder_(loop_builder) {
404  generator->loop_depth_++;
405  }
406  ~ControlScopeForIteration() override { generator()->loop_depth_--; }
407 
408  protected:
409  bool Execute(Command command, Statement* statement,
410  int source_position) override {
411  if (statement != statement_) return false;
412  switch (command) {
413  case CMD_BREAK:
414  PopContextToExpectedDepth();
415  loop_builder_->Break();
416  return true;
417  case CMD_CONTINUE:
418  PopContextToExpectedDepth();
419  loop_builder_->Continue();
420  return true;
421  case CMD_RETURN:
422  case CMD_ASYNC_RETURN:
423  case CMD_RETHROW:
424  break;
425  }
426  return false;
427  }
428 
429  private:
430  Statement* statement_;
431  LoopBuilder* loop_builder_;
432 };
433 
434 // Scoped class for enabling 'throw' in try-catch constructs.
437  public:
439  TryCatchBuilder* try_catch_builder)
440  : ControlScope(generator) {}
441 
442  protected:
443  bool Execute(Command command, Statement* statement,
444  int source_position) override {
445  switch (command) {
446  case CMD_BREAK:
447  case CMD_CONTINUE:
448  case CMD_RETURN:
449  case CMD_ASYNC_RETURN:
450  break;
451  case CMD_RETHROW:
452  // No need to pop contexts, execution re-enters the method body via the
453  // stack unwinding mechanism which itself restores contexts correctly.
454  generator()->BuildReThrow();
455  return true;
456  }
457  return false;
458  }
459 };
460 
461 // Scoped class for enabling control flow through try-finally constructs.
464  public:
466  TryFinallyBuilder* try_finally_builder,
467  DeferredCommands* commands)
468  : ControlScope(generator),
469  try_finally_builder_(try_finally_builder),
470  commands_(commands) {}
471 
472  protected:
473  bool Execute(Command command, Statement* statement,
474  int source_position) override {
475  switch (command) {
476  case CMD_BREAK:
477  case CMD_CONTINUE:
478  case CMD_RETURN:
479  case CMD_ASYNC_RETURN:
480  case CMD_RETHROW:
481  PopContextToExpectedDepth();
482  // We don't record source_position here since we don't generate return
483  // bytecode right here and will generate it later as part of finally
484  // block. Each return bytecode generated in finally block will get own
485  // return source position from corresponded return statement or we'll
486  // use end of function if no return statement is presented.
487  commands_->RecordCommand(command, statement);
488  try_finally_builder_->LeaveTry();
489  return true;
490  }
491  return false;
492  }
493 
494  private:
495  TryFinallyBuilder* try_finally_builder_;
496  DeferredCommands* commands_;
497 };
498 
499 // Allocate and fetch the coverage indices tracking NaryLogical Expressions.
501  public:
503  : generator_(generator) {
504  if (generator_->block_coverage_builder_ == nullptr) return;
505  for (size_t i = 0; i < expr->subsequent_length(); i++) {
506  coverage_slots_.push_back(
507  generator_->AllocateNaryBlockCoverageSlotIfEnabled(expr, i));
508  }
509  }
510 
511  int GetSlotFor(size_t subsequent_expr_index) const {
512  if (generator_->block_coverage_builder_ == nullptr) {
513  return BlockCoverageBuilder::kNoCoverageArraySlot;
514  }
515  DCHECK(coverage_slots_.size() > subsequent_expr_index);
516  return coverage_slots_[subsequent_expr_index];
517  }
518 
519  private:
520  BytecodeGenerator* generator_;
521  std::vector<int> coverage_slots_;
522 };
523 
524 void BytecodeGenerator::ControlScope::PerformCommand(Command command,
525  Statement* statement,
526  int source_position) {
527  ControlScope* current = this;
528  do {
529  if (current->Execute(command, statement, source_position)) {
530  return;
531  }
532  current = current->outer();
533  } while (current != nullptr);
534  UNREACHABLE();
535 }
536 
537 void BytecodeGenerator::ControlScope::PopContextToExpectedDepth() {
538  // Pop context to the expected depth. Note that this can in fact pop multiple
539  // contexts at once because the {PopContext} bytecode takes a saved register.
540  if (generator()->execution_context() != context()) {
541  generator()->builder()->PopContext(context()->reg());
542  }
543 }
544 
546  public:
547  explicit RegisterAllocationScope(BytecodeGenerator* generator)
548  : generator_(generator),
549  outer_next_register_index_(
550  generator->register_allocator()->next_register_index()) {}
551 
553  generator_->register_allocator()->ReleaseRegisters(
554  outer_next_register_index_);
555  }
556 
557  private:
558  BytecodeGenerator* generator_;
559  int outer_next_register_index_;
560 
561  DISALLOW_COPY_AND_ASSIGN(RegisterAllocationScope);
562 };
563 
564 // Scoped base class for determining how the result of an expression will be
565 // used.
567  public:
568  ExpressionResultScope(BytecodeGenerator* generator, Expression::Context kind)
569  : generator_(generator),
570  outer_(generator->execution_result()),
571  allocator_(generator),
572  kind_(kind),
573  type_hint_(TypeHint::kAny) {
574  generator_->set_execution_result(this);
575  }
576 
577  virtual ~ExpressionResultScope() {
578  generator_->set_execution_result(outer_);
579  }
580 
581  bool IsEffect() const { return kind_ == Expression::kEffect; }
582  bool IsValue() const { return kind_ == Expression::kValue; }
583  bool IsTest() const { return kind_ == Expression::kTest; }
584 
585  TestResultScope* AsTest() {
586  DCHECK(IsTest());
587  return reinterpret_cast<TestResultScope*>(this);
588  }
589 
590  // Specify expression always returns a Boolean result value.
591  void SetResultIsBoolean() {
592  DCHECK_EQ(type_hint_, TypeHint::kAny);
593  type_hint_ = TypeHint::kBoolean;
594  }
595 
596  void SetResultIsString() {
597  DCHECK_EQ(type_hint_, TypeHint::kAny);
598  type_hint_ = TypeHint::kString;
599  }
600 
601  TypeHint type_hint() const { return type_hint_; }
602 
603  private:
604  BytecodeGenerator* generator_;
605  ExpressionResultScope* outer_;
606  RegisterAllocationScope allocator_;
607  Expression::Context kind_;
608  TypeHint type_hint_;
609 
610  DISALLOW_COPY_AND_ASSIGN(ExpressionResultScope);
611 };
612 
613 // Scoped class used when the result of the current expression is not
614 // expected to produce a result.
616  : public ExpressionResultScope {
617  public:
618  explicit EffectResultScope(BytecodeGenerator* generator)
619  : ExpressionResultScope(generator, Expression::kEffect) {}
620 };
621 
622 // Scoped class used when the result of the current expression to be
623 // evaluated should go into the interpreter's accumulator.
625  public:
626  explicit ValueResultScope(BytecodeGenerator* generator)
627  : ExpressionResultScope(generator, Expression::kValue) {}
628 };
629 
630 // Scoped class used when the result of the current expression to be
631 // evaluated is only tested with jumps to two branches.
633  public:
634  TestResultScope(BytecodeGenerator* generator, BytecodeLabels* then_labels,
635  BytecodeLabels* else_labels, TestFallthrough fallthrough)
636  : ExpressionResultScope(generator, Expression::kTest),
637  result_consumed_by_test_(false),
638  fallthrough_(fallthrough),
639  then_labels_(then_labels),
640  else_labels_(else_labels) {}
641 
642  // Used when code special cases for TestResultScope and consumes any
643  // possible value by testing and jumping to a then/else label.
644  void SetResultConsumedByTest() {
645  result_consumed_by_test_ = true;
646  }
647  bool result_consumed_by_test() { return result_consumed_by_test_; }
648 
649  // Inverts the control flow of the operation, swapping the then and else
650  // labels and the fallthrough.
651  void InvertControlFlow() {
652  std::swap(then_labels_, else_labels_);
653  fallthrough_ = inverted_fallthrough();
654  }
655 
656  BytecodeLabel* NewThenLabel() { return then_labels_->New(); }
657  BytecodeLabel* NewElseLabel() { return else_labels_->New(); }
658 
659  BytecodeLabels* then_labels() const { return then_labels_; }
660  BytecodeLabels* else_labels() const { return else_labels_; }
661 
662  void set_then_labels(BytecodeLabels* then_labels) {
663  then_labels_ = then_labels;
664  }
665  void set_else_labels(BytecodeLabels* else_labels) {
666  else_labels_ = else_labels;
667  }
668 
669  TestFallthrough fallthrough() const { return fallthrough_; }
670  TestFallthrough inverted_fallthrough() const {
671  switch (fallthrough_) {
672  case TestFallthrough::kThen:
673  return TestFallthrough::kElse;
674  case TestFallthrough::kElse:
675  return TestFallthrough::kThen;
676  default:
677  return TestFallthrough::kNone;
678  }
679  }
680  void set_fallthrough(TestFallthrough fallthrough) {
681  fallthrough_ = fallthrough;
682  }
683 
684  private:
685  bool result_consumed_by_test_;
686  TestFallthrough fallthrough_;
687  BytecodeLabels* then_labels_;
688  BytecodeLabels* else_labels_;
689 
690  DISALLOW_COPY_AND_ASSIGN(TestResultScope);
691 };
692 
693 // Used to build a list of global declaration initial value pairs.
695  public:
696  explicit GlobalDeclarationsBuilder(Zone* zone)
697  : declarations_(0, zone),
698  constant_pool_entry_(0),
699  has_constant_pool_entry_(false) {}
700 
701  void AddFunctionDeclaration(const AstRawString* name, FeedbackSlot slot,
702  FeedbackSlot literal_slot,
703  FunctionLiteral* func) {
704  DCHECK(!slot.IsInvalid());
705  declarations_.push_back(Declaration(name, slot, literal_slot, func));
706  }
707 
708  void AddUndefinedDeclaration(const AstRawString* name, FeedbackSlot slot) {
709  DCHECK(!slot.IsInvalid());
710  declarations_.push_back(Declaration(name, slot, nullptr));
711  }
712 
713  Handle<FixedArray> AllocateDeclarations(UnoptimizedCompilationInfo* info,
714  Handle<Script> script,
715  Isolate* isolate) {
716  DCHECK(has_constant_pool_entry_);
717  int array_index = 0;
718  Handle<FixedArray> data = isolate->factory()->NewFixedArray(
719  static_cast<int>(declarations_.size() * 4), TENURED);
720  for (const Declaration& declaration : declarations_) {
721  FunctionLiteral* func = declaration.func;
722  Handle<Object> initial_value;
723  if (func == nullptr) {
724  initial_value = isolate->factory()->undefined_value();
725  } else {
726  initial_value = Compiler::GetSharedFunctionInfo(func, script, isolate);
727  }
728 
729  // Return a null handle if any initial values can't be created. Caller
730  // will set stack overflow.
731  if (initial_value.is_null()) return Handle<FixedArray>();
732 
733  data->set(array_index++, *declaration.name->string());
734  data->set(array_index++, Smi::FromInt(declaration.slot.ToInt()));
735  Object* undefined_or_literal_slot;
736  if (declaration.literal_slot.IsInvalid()) {
737  undefined_or_literal_slot = ReadOnlyRoots(isolate).undefined_value();
738  } else {
739  undefined_or_literal_slot =
740  Smi::FromInt(declaration.literal_slot.ToInt());
741  }
742  data->set(array_index++, undefined_or_literal_slot);
743  data->set(array_index++, *initial_value);
744  }
745  return data;
746  }
747 
748  size_t constant_pool_entry() {
749  DCHECK(has_constant_pool_entry_);
750  return constant_pool_entry_;
751  }
752 
753  void set_constant_pool_entry(size_t constant_pool_entry) {
754  DCHECK(!empty());
755  DCHECK(!has_constant_pool_entry_);
756  constant_pool_entry_ = constant_pool_entry;
757  has_constant_pool_entry_ = true;
758  }
759 
760  bool empty() { return declarations_.empty(); }
761 
762  private:
763  struct Declaration {
764  Declaration() : slot(FeedbackSlot::Invalid()), func(nullptr) {}
765  Declaration(const AstRawString* name, FeedbackSlot slot,
766  FeedbackSlot literal_slot, FunctionLiteral* func)
767  : name(name), slot(slot), literal_slot(literal_slot), func(func) {}
768  Declaration(const AstRawString* name, FeedbackSlot slot,
769  FunctionLiteral* func)
770  : name(name),
771  slot(slot),
772  literal_slot(FeedbackSlot::Invalid()),
773  func(func) {}
774 
775  const AstRawString* name;
776  FeedbackSlot slot;
777  FeedbackSlot literal_slot;
778  FunctionLiteral* func;
779  };
780  ZoneVector<Declaration> declarations_;
781  size_t constant_pool_entry_;
782  bool has_constant_pool_entry_;
783 };
784 
786  public:
787  CurrentScope(BytecodeGenerator* generator, Scope* scope)
788  : generator_(generator), outer_scope_(generator->current_scope()) {
789  if (scope != nullptr) {
790  DCHECK_EQ(outer_scope_, scope->outer_scope());
791  generator_->set_current_scope(scope);
792  }
793  }
794  ~CurrentScope() {
795  if (outer_scope_ != generator_->current_scope()) {
796  generator_->set_current_scope(outer_scope_);
797  }
798  }
799 
800  private:
801  BytecodeGenerator* generator_;
802  Scope* outer_scope_;
803 };
804 
806  public:
807  explicit FeedbackSlotCache(Zone* zone) : map_(zone) {}
808 
809  void Put(FeedbackSlotKind slot_kind, Variable* variable, FeedbackSlot slot) {
810  PutImpl(slot_kind, 0, variable, slot);
811  }
812  void Put(FeedbackSlotKind slot_kind, AstNode* node, FeedbackSlot slot) {
813  PutImpl(slot_kind, 0, node, slot);
814  }
815  void Put(FeedbackSlotKind slot_kind, int variable_index,
816  const AstRawString* name, FeedbackSlot slot) {
817  PutImpl(slot_kind, variable_index, name, slot);
818  }
819 
820  FeedbackSlot Get(FeedbackSlotKind slot_kind, Variable* variable) const {
821  return GetImpl(slot_kind, 0, variable);
822  }
823  FeedbackSlot Get(FeedbackSlotKind slot_kind, AstNode* node) const {
824  return GetImpl(slot_kind, 0, node);
825  }
826  FeedbackSlot Get(FeedbackSlotKind slot_kind, int variable_index,
827  const AstRawString* name) const {
828  return GetImpl(slot_kind, variable_index, name);
829  }
830 
831  private:
832  typedef std::tuple<FeedbackSlotKind, int, const void*> Key;
833 
834  void PutImpl(FeedbackSlotKind slot_kind, int index, const void* node,
835  FeedbackSlot slot) {
836  Key key = std::make_tuple(slot_kind, index, node);
837  auto entry = std::make_pair(key, slot);
838  map_.insert(entry);
839  }
840 
841  FeedbackSlot GetImpl(FeedbackSlotKind slot_kind, int index,
842  const void* node) const {
843  Key key = std::make_tuple(slot_kind, index, node);
844  auto iter = map_.find(key);
845  if (iter != map_.end()) {
846  return iter->second;
847  }
848  return FeedbackSlot();
849  }
850 
852 };
853 
855  public:
856  IteratorRecord(Register object_register, Register next_register,
857  IteratorType type = IteratorType::kNormal)
858  : type_(type), object_(object_register), next_(next_register) {
859  DCHECK(object_.is_valid() && next_.is_valid());
860  }
861 
862  inline IteratorType type() const { return type_; }
863  inline Register object() const { return object_; }
864  inline Register next() const { return next_; }
865 
866  private:
867  IteratorType type_;
868  Register object_;
869  Register next_;
870 };
871 
872 #ifdef DEBUG
873 
874 static bool IsInEagerLiterals(
875  FunctionLiteral* literal,
876  const std::vector<FunctionLiteral*>& eager_literals) {
877  for (FunctionLiteral* eager_literal : eager_literals) {
878  if (literal == eager_literal) return true;
879  }
880  return false;
881 }
882 
883 #endif // DEBUG
884 
885 BytecodeGenerator::BytecodeGenerator(
887  const AstStringConstants* ast_string_constants,
888  std::vector<FunctionLiteral*>* eager_inner_literals)
889  : zone_(info->zone()),
890  builder_(zone(), info->num_parameters_including_this(),
891  info->scope()->num_stack_slots(), info->feedback_vector_spec(),
892  info->SourcePositionRecordingMode()),
893  info_(info),
894  ast_string_constants_(ast_string_constants),
895  closure_scope_(info->scope()),
896  current_scope_(info->scope()),
897  eager_inner_literals_(eager_inner_literals),
898  feedback_slot_cache_(new (zone()) FeedbackSlotCache(zone())),
899  globals_builder_(new (zone()) GlobalDeclarationsBuilder(zone())),
900  block_coverage_builder_(nullptr),
901  global_declarations_(0, zone()),
902  function_literals_(0, zone()),
903  native_function_literals_(0, zone()),
904  object_literals_(0, zone()),
905  array_literals_(0, zone()),
906  class_literals_(0, zone()),
907  template_objects_(0, zone()),
908  execution_control_(nullptr),
909  execution_context_(nullptr),
910  execution_result_(nullptr),
911  incoming_new_target_or_generator_(),
912  dummy_feedback_slot_(feedback_spec(), FeedbackSlotKind::kCompareOp),
913  generator_jump_table_(nullptr),
914  suspend_count_(0),
915  loop_depth_(0),
916  catch_prediction_(HandlerTable::UNCAUGHT) {
917  DCHECK_EQ(closure_scope(), closure_scope()->GetClosureScope());
918  if (info->has_source_range_map()) {
919  block_coverage_builder_ = new (zone())
920  BlockCoverageBuilder(zone(), builder(), info->source_range_map());
921  }
922 }
923 
// Finalizes generation: materializes deferred constants, attaches coverage
// info, and converts the accumulated bytecode into a BytecodeArray. Returns an
// empty handle if a stack overflow was recorded during generation.
Handle<BytecodeArray> BytecodeGenerator::FinalizeBytecode(
    Isolate* isolate, Handle<Script> script) {
  // Finalization must happen on the isolate's own thread.
  DCHECK(ThreadId::Current().Equals(isolate->thread_id()));
#ifdef DEBUG
  // Unoptimized compilation should be context-independent. Verify that we don't
  // access the native context by nulling it out during finalization.
  SaveContext save(isolate);
  isolate->set_context(Context());
#endif

  AllocateDeferredConstants(isolate, script);

  if (block_coverage_builder_) {
    info()->set_coverage_info(
        isolate->factory()->NewCoverageInfo(block_coverage_builder_->slots()));
    if (FLAG_trace_block_coverage) {
      info()->coverage_info()->Print(info()->literal()->GetDebugName());
    }
  }

  // AllocateDeferredConstants may have flagged a stack overflow; bail out
  // before building the array.
  if (HasStackOverflow()) return Handle<BytecodeArray>();
  Handle<BytecodeArray> bytecode_array = builder()->ToBytecodeArray(isolate);

  if (incoming_new_target_or_generator_.is_valid()) {
    bytecode_array->set_incoming_new_target_or_generator_register(
        incoming_new_target_or_generator_);
  }

  return bytecode_array;
}
954 
955 void BytecodeGenerator::AllocateDeferredConstants(Isolate* isolate,
956  Handle<Script> script) {
957  // Build global declaration pair arrays.
958  for (GlobalDeclarationsBuilder* globals_builder : global_declarations_) {
959  Handle<FixedArray> declarations =
960  globals_builder->AllocateDeclarations(info(), script, isolate);
961  if (declarations.is_null()) return SetStackOverflow();
962  builder()->SetDeferredConstantPoolEntry(
963  globals_builder->constant_pool_entry(), declarations);
964  }
965 
966  // Find or build shared function infos.
967  for (std::pair<FunctionLiteral*, size_t> literal : function_literals_) {
968  FunctionLiteral* expr = literal.first;
969  Handle<SharedFunctionInfo> shared_info =
970  Compiler::GetSharedFunctionInfo(expr, script, isolate);
971  if (shared_info.is_null()) return SetStackOverflow();
972  builder()->SetDeferredConstantPoolEntry(literal.second, shared_info);
973  }
974 
975  // Find or build shared function infos for the native function templates.
976  for (std::pair<NativeFunctionLiteral*, size_t> literal :
977  native_function_literals_) {
978  NativeFunctionLiteral* expr = literal.first;
979  v8::Isolate* v8_isolate = reinterpret_cast<v8::Isolate*>(isolate);
980 
981  // Compute the function template for the native function.
983  expr->extension()->GetNativeFunctionTemplate(
984  v8_isolate, Utils::ToLocal(expr->name()));
985  DCHECK(!info.IsEmpty());
986 
987  Handle<SharedFunctionInfo> shared_info =
988  FunctionTemplateInfo::GetOrCreateSharedFunctionInfo(
989  isolate, Utils::OpenHandle(*info), expr->name());
990  DCHECK(!shared_info.is_null());
991  builder()->SetDeferredConstantPoolEntry(literal.second, shared_info);
992  }
993 
994  // Build object literal constant properties
995  for (std::pair<ObjectLiteral*, size_t> literal : object_literals_) {
996  ObjectLiteral* object_literal = literal.first;
997  if (object_literal->properties_count() > 0) {
998  // If constant properties is an empty fixed array, we've already added it
999  // to the constant pool when visiting the object literal.
1000  Handle<ObjectBoilerplateDescription> constant_properties =
1001  object_literal->GetOrBuildBoilerplateDescription(isolate);
1002 
1003  builder()->SetDeferredConstantPoolEntry(literal.second,
1004  constant_properties);
1005  }
1006  }
1007 
1008  // Build array literal constant elements
1009  for (std::pair<ArrayLiteral*, size_t> literal : array_literals_) {
1010  ArrayLiteral* array_literal = literal.first;
1011  Handle<ArrayBoilerplateDescription> constant_elements =
1012  array_literal->GetOrBuildBoilerplateDescription(isolate);
1013  builder()->SetDeferredConstantPoolEntry(literal.second, constant_elements);
1014  }
1015 
1016  // Build class literal boilerplates.
1017  for (std::pair<ClassLiteral*, size_t> literal : class_literals_) {
1018  ClassLiteral* class_literal = literal.first;
1019  Handle<ClassBoilerplate> class_boilerplate =
1020  ClassBoilerplate::BuildClassBoilerplate(isolate, class_literal);
1021  builder()->SetDeferredConstantPoolEntry(literal.second, class_boilerplate);
1022  }
1023 
1024  // Build template literals.
1025  for (std::pair<GetTemplateObject*, size_t> literal : template_objects_) {
1026  GetTemplateObject* get_template_object = literal.first;
1027  Handle<TemplateObjectDescription> description =
1028  get_template_object->GetOrBuildDescription(isolate);
1029  builder()->SetDeferredConstantPoolEntry(literal.second, description);
1030  }
1031 }
1032 
// Main entry point for bytecode generation. Runs entirely without heap
// allocation, handle allocation, or handle dereference; anything requiring
// the heap is deferred to FinalizeBytecode/AllocateDeferredConstants.
void BytecodeGenerator::GenerateBytecode(uintptr_t stack_limit) {
  DisallowHeapAllocation no_allocation;
  DisallowHandleAllocation no_handles;
  DisallowHandleDereference no_deref;

  InitializeAstVisitor(stack_limit);

  // Initialize the incoming context.
  ContextScope incoming_context(this, closure_scope());

  // Initialize control scope.
  ControlScopeForTopLevel control(this);

  RegisterAllocationScope register_scope(this);

  AllocateTopLevelRegisters();

  // Functions that can suspend (generators, async functions) need a state
  // dispatch prologue so resumption jumps to the right suspend point.
  if (info()->literal()->CanSuspend()) {
    BuildGeneratorPrologue();
  }

  if (closure_scope()->NeedsContext()) {
    // Push a new inner context scope for the function.
    BuildNewLocalActivationContext();
    ContextScope local_function_context(this, closure_scope());
    BuildLocalActivationContextInitialization();
    GenerateBytecodeBody();
  } else {
    GenerateBytecodeBody();
  }

  // Check that we are not falling off the end.
  DCHECK(!builder()->RequiresImplicitReturn());
}
1067 
// Emits the function's prologue bookkeeping (arguments object, rest array,
// special variables, tracing/profiling hooks, declarations) followed by the
// statements of the function body and, if needed, an implicit return.
void BytecodeGenerator::GenerateBytecodeBody() {
  // Build the arguments object if it is used.
  VisitArgumentsObject(closure_scope()->arguments());

  // Build rest arguments array if it is used.
  Variable* rest_parameter = closure_scope()->rest_parameter();
  VisitRestArgumentsArray(rest_parameter);

  // Build assignment to the function name or {.this_function}
  // variables if used.
  VisitThisFunctionVariable(closure_scope()->function_var());
  VisitThisFunctionVariable(closure_scope()->this_function_var());

  // Build assignment to {new.target} variable if it is used.
  VisitNewTargetVariable(closure_scope()->new_target_var());

  // Create a generator object if necessary and initialize the
  // {.generator_object} variable.
  if (IsResumableFunction(info()->literal()->kind())) {
    BuildGeneratorObjectVariableInitialization();
  }

  // Emit tracing call if requested to do so.
  if (FLAG_trace) builder()->CallRuntime(Runtime::kTraceEnter);

  // Emit type profile call, collecting a feedback slot and recording the
  // type of each parameter at its initializer position.
  if (info()->collect_type_profile()) {
    feedback_spec()->AddTypeProfileSlot();
    int num_parameters = closure_scope()->num_parameters();
    for (int i = 0; i < num_parameters; i++) {
      Register parameter(builder()->Parameter(i));
      builder()->LoadAccumulatorWithRegister(parameter).CollectTypeProfile(
          closure_scope()->parameter(i)->initializer_position());
    }
  }

  // Visit declarations within the function scope.
  VisitDeclarations(closure_scope()->declarations());

  // Emit initializing assignments for module namespace imports (if any).
  VisitModuleNamespaceImports();

  // Perform a stack-check before the body.
  builder()->StackCheck(info()->literal()->start_position());

  // The derived constructor case is handled in VisitCallSuper.
  if (IsBaseConstructor(function_kind()) &&
      info()->literal()->requires_instance_members_initializer()) {
    BuildInstanceMemberInitialization(Register::function_closure(),
                                      builder()->Receiver());
  }

  // Visit statements in the function body.
  VisitStatements(info()->literal()->body());

  // Emit an implicit return instruction in case control flow can fall off
  // the end of the function without an explicit return being present on all
  // paths.
  if (builder()->RequiresImplicitReturn()) {
    builder()->LoadUndefined();
    BuildReturn();
  }
}
1130 
1131 void BytecodeGenerator::AllocateTopLevelRegisters() {
1132  if (IsResumableFunction(info()->literal()->kind())) {
1133  // Either directly use generator_object_var or allocate a new register for
1134  // the incoming generator object.
1135  Variable* generator_object_var = closure_scope()->generator_object_var();
1136  if (generator_object_var->location() == VariableLocation::LOCAL) {
1137  incoming_new_target_or_generator_ =
1138  GetRegisterForLocalVariable(generator_object_var);
1139  } else {
1140  incoming_new_target_or_generator_ = register_allocator()->NewRegister();
1141  }
1142  } else if (closure_scope()->new_target_var()) {
1143  // Either directly use new_target_var or allocate a new register for
1144  // the incoming new target object.
1145  Variable* new_target_var = closure_scope()->new_target_var();
1146  if (new_target_var->location() == VariableLocation::LOCAL) {
1147  incoming_new_target_or_generator_ =
1148  GetRegisterForLocalVariable(new_target_var);
1149  } else {
1150  incoming_new_target_or_generator_ = register_allocator()->NewRegister();
1151  }
1152  }
1153 }
1154 
// Emits the generator state dispatch at function entry: a jump table with
// one entry per suspend point, plus a switch on the generator's state.
void BytecodeGenerator::BuildGeneratorPrologue() {
  DCHECK_GT(info()->literal()->suspend_count(), 0);
  DCHECK(generator_object().is_valid());
  generator_jump_table_ =
      builder()->AllocateJumpTable(info()->literal()->suspend_count(), 0);

  // If the generator is not undefined, this is a resume, so perform state
  // dispatch.
  builder()->SwitchOnGeneratorState(generator_object(), generator_jump_table_);

  // Otherwise, fall-through to the ordinary function prologue, after which we
  // will run into the generator object creation and other extra code inserted
  // by the parser.
}
1169 
1170 void BytecodeGenerator::VisitBlock(Block* stmt) {
1171  // Visit declarations and statements.
1172  CurrentScope current_scope(this, stmt->scope());
1173  if (stmt->scope() != nullptr && stmt->scope()->NeedsContext()) {
1174  BuildNewLocalBlockContext(stmt->scope());
1175  ContextScope scope(this, stmt->scope());
1176  VisitBlockDeclarationsAndStatements(stmt);
1177  } else {
1178  VisitBlockDeclarationsAndStatements(stmt);
1179  }
1180 }
1181 
1182 void BytecodeGenerator::VisitBlockDeclarationsAndStatements(Block* stmt) {
1183  BlockBuilder block_builder(builder(), block_coverage_builder_, stmt);
1184  ControlScopeForBreakable execution_control(this, stmt, &block_builder);
1185  if (stmt->scope() != nullptr) {
1186  VisitDeclarations(stmt->scope()->declarations());
1187  }
1188  VisitStatements(stmt->statements());
1189 }
1190 
// Emits the declaration-time initialization for a variable, dispatching on
// where the variable is allocated. Variables with binding_needs_init() are
// initialized to the hole for TDZ checks.
void BytecodeGenerator::VisitVariableDeclaration(VariableDeclaration* decl) {
  Variable* variable = decl->proxy()->var();
  switch (variable->location()) {
    case VariableLocation::UNALLOCATED: {
      // Global variable: record it for the deferred DeclareGlobals call.
      DCHECK(!variable->binding_needs_init());
      FeedbackSlot slot =
          GetCachedLoadGlobalICSlot(NOT_INSIDE_TYPEOF, variable);
      globals_builder()->AddUndefinedDeclaration(variable->raw_name(), slot);
      break;
    }
    case VariableLocation::LOCAL:
      // Hole-initialize the local register when TDZ semantics apply.
      if (variable->binding_needs_init()) {
        Register destination(builder()->Local(variable->index()));
        builder()->LoadTheHole().StoreAccumulatorInRegister(destination);
      }
      break;
    case VariableLocation::PARAMETER:
      // Hole-initialize the parameter register when TDZ semantics apply.
      if (variable->binding_needs_init()) {
        Register destination(builder()->Parameter(variable->index()));
        builder()->LoadTheHole().StoreAccumulatorInRegister(destination);
      }
      break;
    case VariableLocation::CONTEXT:
      // Hole-initialize the context slot; depth 0 because the declaration is
      // emitted in the variable's own scope.
      if (variable->binding_needs_init()) {
        DCHECK_EQ(0, execution_context()->ContextChainDepth(variable->scope()));
        builder()->LoadTheHole().StoreContextSlot(execution_context()->reg(),
                                                  variable->index(), 0);
      }
      break;
    case VariableLocation::LOOKUP: {
      // Dynamically declared `var` (e.g. inside eval) goes through the
      // runtime.
      DCHECK_EQ(VariableMode::kVar, variable->mode());
      DCHECK(!variable->binding_needs_init());

      Register name = register_allocator()->NewRegister();

      builder()
          ->LoadLiteral(variable->raw_name())
          .StoreAccumulatorInRegister(name)
          .CallRuntime(Runtime::kDeclareEvalVar, name);
      break;
    }
    case VariableLocation::MODULE:
      // Only exported module bindings need hole-initialization here.
      if (variable->IsExport() && variable->binding_needs_init()) {
        builder()->LoadTheHole();
        BuildVariableAssignment(variable, Token::INIT, HoleCheckMode::kElided);
      }
      // Nothing to do for imports.
      break;
  }
}
1241 
// Emits the hoisted initialization for a function declaration, binding the
// closure to its variable according to the variable's allocation.
void BytecodeGenerator::VisitFunctionDeclaration(FunctionDeclaration* decl) {
  Variable* variable = decl->proxy()->var();
  DCHECK(variable->mode() == VariableMode::kLet ||
         variable->mode() == VariableMode::kVar);
  switch (variable->location()) {
    case VariableLocation::UNALLOCATED: {
      // Global function: record for the deferred DeclareGlobals call.
      FeedbackSlot slot =
          GetCachedLoadGlobalICSlot(NOT_INSIDE_TYPEOF, variable);
      FeedbackSlot literal_slot = GetCachedCreateClosureSlot(decl->fun());
      globals_builder()->AddFunctionDeclaration(variable->raw_name(), slot,
                                                literal_slot, decl->fun());
      AddToEagerLiteralsIfEager(decl->fun());
      break;
    }
    case VariableLocation::PARAMETER:
    case VariableLocation::LOCAL: {
      // Create the closure in the accumulator and store it in the variable.
      VisitForAccumulatorValue(decl->fun());
      BuildVariableAssignment(variable, Token::INIT, HoleCheckMode::kElided);
      break;
    }
    case VariableLocation::CONTEXT: {
      // Depth 0: the declaration is emitted in the variable's own scope.
      DCHECK_EQ(0, execution_context()->ContextChainDepth(variable->scope()));
      VisitForAccumulatorValue(decl->fun());
      builder()->StoreContextSlot(execution_context()->reg(), variable->index(),
                                  0);
      break;
    }
    case VariableLocation::LOOKUP: {
      // Dynamically declared function (e.g. inside eval): declare via the
      // runtime with (name, closure) arguments.
      RegisterList args = register_allocator()->NewRegisterList(2);
      builder()
          ->LoadLiteral(variable->raw_name())
          .StoreAccumulatorInRegister(args[0]);
      VisitForAccumulatorValue(decl->fun());
      builder()->StoreAccumulatorInRegister(args[1]).CallRuntime(
          Runtime::kDeclareEvalFunction, args);
      break;
    }
    case VariableLocation::MODULE:
      // Exported module-level function.
      DCHECK_EQ(variable->mode(), VariableMode::kLet);
      DCHECK(variable->IsExport());
      VisitForAccumulatorValue(decl->fun());
      BuildVariableAssignment(variable, Token::INIT, HoleCheckMode::kElided);
      break;
  }
  DCHECK_IMPLIES(decl->fun()->ShouldEagerCompile(),
                 IsInEagerLiterals(decl->fun(), *eager_inner_literals_));
}
1289 
1290 void BytecodeGenerator::VisitModuleNamespaceImports() {
1291  if (!closure_scope()->is_module_scope()) return;
1292 
1293  RegisterAllocationScope register_scope(this);
1294  Register module_request = register_allocator()->NewRegister();
1295 
1296  ModuleDescriptor* descriptor = closure_scope()->AsModuleScope()->module();
1297  for (auto entry : descriptor->namespace_imports()) {
1298  builder()
1299  ->LoadLiteral(Smi::FromInt(entry->module_request))
1300  .StoreAccumulatorInRegister(module_request)
1301  .CallRuntime(Runtime::kGetModuleNamespace, module_request);
1302  Variable* var = closure_scope()->LookupInModule(entry->local_name);
1303  BuildVariableAssignment(var, Token::INIT, HoleCheckMode::kElided);
1304  }
1305 }
1306 
// Visits a list of declarations and, if any of them were global, emits a
// single Runtime::kDeclareGlobals call covering all of them.
void BytecodeGenerator::VisitDeclarations(Declaration::List* declarations) {
  RegisterAllocationScope register_scope(this);
  DCHECK(globals_builder()->empty());
  for (Declaration* decl : *declarations) {
    // Each declaration gets its own register scope so temporaries don't
    // accumulate.
    RegisterAllocationScope register_scope(this);
    Visit(decl);
  }
  // No global declarations were accumulated; nothing more to emit.
  if (globals_builder()->empty()) return;

  globals_builder()->set_constant_pool_entry(
      builder()->AllocateDeferredConstantPoolEntry());
  int encoded_flags = DeclareGlobalsEvalFlag::encode(info()->is_eval()) |
                      DeclareGlobalsNativeFlag::encode(info()->is_native());

  // Emit code to declare globals: (declarations array, flags, closure).
  RegisterList args = register_allocator()->NewRegisterList(3);
  builder()
      ->LoadConstantPoolEntry(globals_builder()->constant_pool_entry())
      .StoreAccumulatorInRegister(args[0])
      .LoadLiteral(Smi::FromInt(encoded_flags))
      .StoreAccumulatorInRegister(args[1])
      .MoveRegister(Register::function_closure(), args[2])
      .CallRuntime(Runtime::kDeclareGlobals, args);

  // Push and reset globals builder.
  global_declarations_.push_back(globals_builder());
  globals_builder_ = new (zone()) GlobalDeclarationsBuilder(zone());
}
1335 
1336 void BytecodeGenerator::VisitStatements(
1337  const ZonePtrList<Statement>* statements) {
1338  for (int i = 0; i < statements->length(); i++) {
1339  // Allocate an outer register allocations scope for the statement.
1340  RegisterAllocationScope allocation_scope(this);
1341  Statement* stmt = statements->at(i);
1342  Visit(stmt);
1343  if (builder()->RemainderOfBlockIsDead()) break;
1344  }
1345 }
1346 
// An expression statement is evaluated only for its side effects; the
// resulting value is discarded.
void BytecodeGenerator::VisitExpressionStatement(ExpressionStatement* stmt) {
  builder()->SetStatementPosition(stmt);
  VisitForEffect(stmt->expression());
}
1351 
// Empty statements generate no bytecode.
void BytecodeGenerator::VisitEmptyStatement(EmptyStatement* stmt) {
}
1354 
// Emits an if/else, constant-folding statically-known conditions so that
// only the reachable arm generates bytecode.
void BytecodeGenerator::VisitIfStatement(IfStatement* stmt) {
  ConditionalControlFlowBuilder conditional_builder(
      builder(), block_coverage_builder_, stmt);
  builder()->SetStatementPosition(stmt);

  if (stmt->condition()->ToBooleanIsTrue()) {
    // Generate then block unconditionally as always true.
    conditional_builder.Then();
    Visit(stmt->then_statement());
  } else if (stmt->condition()->ToBooleanIsFalse()) {
    // Generate else block unconditionally if it exists.
    if (stmt->HasElseStatement()) {
      conditional_builder.Else();
      Visit(stmt->else_statement());
    }
  } else {
    // TODO(oth): If then statement is BreakStatement or
    // ContinueStatement we can reduce number of generated
    // jump/jump_ifs here. See BasicLoops test.
    VisitForTest(stmt->condition(), conditional_builder.then_labels(),
                 conditional_builder.else_labels(), TestFallthrough::kThen);

    conditional_builder.Then();
    Visit(stmt->then_statement());

    if (stmt->HasElseStatement()) {
      // Skip the else block when the then block completes normally.
      conditional_builder.JumpToEnd();
      conditional_builder.Else();
      Visit(stmt->else_statement());
    }
  }
}
1387 
// A sloppy-mode block-scoped function declaration is just a wrapper around
// the underlying statement; delegate directly.
void BytecodeGenerator::VisitSloppyBlockFunctionStatement(
    SloppyBlockFunctionStatement* stmt) {
  Visit(stmt->statement());
}
1392 
// Emits a `continue`, routed through the control scope chain so pending
// finally blocks and context pops are handled correctly.
void BytecodeGenerator::VisitContinueStatement(ContinueStatement* stmt) {
  AllocateBlockCoverageSlotIfEnabled(stmt, SourceRangeKind::kContinuation);
  builder()->SetStatementPosition(stmt);
  execution_control()->Continue(stmt->target());
}
1398 
// Emits a `break`, routed through the control scope chain so pending
// finally blocks and context pops are handled correctly.
void BytecodeGenerator::VisitBreakStatement(BreakStatement* stmt) {
  AllocateBlockCoverageSlotIfEnabled(stmt, SourceRangeKind::kContinuation);
  builder()->SetStatementPosition(stmt);
  execution_control()->Break(stmt->target());
}
1404 
// Emits a `return`: evaluates the return value into the accumulator, then
// returns through the control scope (async returns resolve the promise).
void BytecodeGenerator::VisitReturnStatement(ReturnStatement* stmt) {
  AllocateBlockCoverageSlotIfEnabled(stmt, SourceRangeKind::kContinuation);
  builder()->SetStatementPosition(stmt);
  VisitForAccumulatorValue(stmt->expression());
  if (stmt->is_async_return()) {
    execution_control()->AsyncReturnAccumulator(stmt->end_position());
  } else {
    execution_control()->ReturnAccumulator(stmt->end_position());
  }
}
1415 
// Emits a `with` statement: evaluates the subject expression, creates a
// with-context from it, and visits the body inside that context.
void BytecodeGenerator::VisitWithStatement(WithStatement* stmt) {
  builder()->SetStatementPosition(stmt);
  VisitForAccumulatorValue(stmt->expression());
  BuildNewLocalWithContext(stmt->scope());
  VisitInScope(stmt->statement(), stmt->scope());
}
1422 
// Emits a switch statement in two passes: first the label comparisons (tag
// kept in a register, compared via '===' against each case label), then the
// case bodies, which may fall through to each other.
void BytecodeGenerator::VisitSwitchStatement(SwitchStatement* stmt) {
  // We need this scope because we visit for register values. We have to
  // maintain a execution result scope where registers can be allocated.
  ZonePtrList<CaseClause>* clauses = stmt->cases();
  SwitchBuilder switch_builder(builder(), block_coverage_builder_, stmt,
                               clauses->length());
  ControlScopeForBreakable scope(this, stmt, &switch_builder);
  int default_index = -1;

  builder()->SetStatementPosition(stmt);

  // Keep the switch value in a register until a case matches.
  Register tag = VisitForRegisterValue(stmt->tag());
  FeedbackSlot slot = clauses->length() > 0
                          ? feedback_spec()->AddCompareICSlot()
                          : FeedbackSlot::Invalid();

  // Iterate over all cases and create nodes for label comparison.
  for (int i = 0; i < clauses->length(); i++) {
    CaseClause* clause = clauses->at(i);

    // The default is not a test, remember index.
    if (clause->is_default()) {
      default_index = i;
      continue;
    }

    // Perform label comparison as if via '===' with tag.
    VisitForAccumulatorValue(clause->label());
    builder()->CompareOperation(Token::Value::EQ_STRICT, tag,
                                feedback_index(slot));
    switch_builder.Case(ToBooleanMode::kAlreadyBoolean, i);
  }

  if (default_index >= 0) {
    // Emit default jump if there is a default case.
    switch_builder.DefaultAt(default_index);
  } else {
    // Otherwise if we have reached here none of the cases matched, so jump to
    // the end.
    switch_builder.Break();
  }

  // Iterate over all cases and create the case bodies.
  for (int i = 0; i < clauses->length(); i++) {
    CaseClause* clause = clauses->at(i);
    switch_builder.SetCaseTarget(i, clause);
    VisitStatements(clause->statements());
  }
}
1473 
// Shared helper for all loop forms: emits the loop body with a stack check
// (so long-running loops can be interrupted) and binds the continue target.
void BytecodeGenerator::VisitIterationBody(IterationStatement* stmt,
                                           LoopBuilder* loop_builder) {
  loop_builder->LoopBody();
  ControlScopeForIteration execution_control(this, stmt, loop_builder);
  builder()->StackCheck(stmt->position());
  Visit(stmt->body());
  loop_builder->BindContinueTarget();
}
1482 
// Emits a do-while loop, constant-folding statically-known conditions:
// a false condition runs the body exactly once, a true condition becomes
// an unconditional back-branch.
void BytecodeGenerator::VisitDoWhileStatement(DoWhileStatement* stmt) {
  LoopBuilder loop_builder(builder(), block_coverage_builder_, stmt);
  if (stmt->cond()->ToBooleanIsFalse()) {
    // Body executes once; no loop header or back-branch needed.
    VisitIterationBody(stmt, &loop_builder);
  } else if (stmt->cond()->ToBooleanIsTrue()) {
    // Infinite loop: unconditional jump back to the header.
    loop_builder.LoopHeader();
    VisitIterationBody(stmt, &loop_builder);
    loop_builder.JumpToHeader(loop_depth_);
  } else {
    // General case: test the condition after the body; fall through to the
    // back-branch when it holds, break out otherwise.
    loop_builder.LoopHeader();
    VisitIterationBody(stmt, &loop_builder);
    builder()->SetExpressionAsStatementPosition(stmt->cond());
    BytecodeLabels loop_backbranch(zone());
    VisitForTest(stmt->cond(), &loop_backbranch, loop_builder.break_labels(),
                 TestFallthrough::kThen);
    loop_backbranch.Bind(builder());
    loop_builder.JumpToHeader(loop_depth_);
  }
}
1502 
// Emits a while loop: the condition test is skipped entirely for statically
// true conditions, and the whole loop is elided for statically false ones.
void BytecodeGenerator::VisitWhileStatement(WhileStatement* stmt) {
  LoopBuilder loop_builder(builder(), block_coverage_builder_, stmt);

  if (stmt->cond()->ToBooleanIsFalse()) {
    // If the condition is false there is no need to generate the loop.
    return;
  }

  loop_builder.LoopHeader();
  if (!stmt->cond()->ToBooleanIsTrue()) {
    // Test the condition at the top of each iteration; break out when false.
    builder()->SetExpressionAsStatementPosition(stmt->cond());
    BytecodeLabels loop_body(zone());
    VisitForTest(stmt->cond(), &loop_body, loop_builder.break_labels(),
                 TestFallthrough::kThen);
    loop_body.Bind(builder());
  }
  VisitIterationBody(stmt, &loop_builder);
  loop_builder.JumpToHeader(loop_depth_);
}
1522 
// Emits a C-style for loop: init, then header, optional condition test,
// body, optional next expression, back-branch. Statically-false conditions
// elide everything after init.
void BytecodeGenerator::VisitForStatement(ForStatement* stmt) {
  LoopBuilder loop_builder(builder(), block_coverage_builder_, stmt);

  if (stmt->init() != nullptr) {
    Visit(stmt->init());
  }
  if (stmt->cond() && stmt->cond()->ToBooleanIsFalse()) {
    // If the condition is known to be false there is no need to generate
    // body, next or condition blocks. Init block should be generated.
    return;
  }

  loop_builder.LoopHeader();
  if (stmt->cond() && !stmt->cond()->ToBooleanIsTrue()) {
    // Test the condition at the top of each iteration; break out when false.
    builder()->SetExpressionAsStatementPosition(stmt->cond());
    BytecodeLabels loop_body(zone());
    VisitForTest(stmt->cond(), &loop_body, loop_builder.break_labels(),
                 TestFallthrough::kThen);
    loop_body.Bind(builder());
  }
  VisitIterationBody(stmt, &loop_builder);
  if (stmt->next() != nullptr) {
    builder()->SetStatementPosition(stmt->next());
    Visit(stmt->next());
  }
  loop_builder.JumpToHeader(loop_depth_);
}
1550 
// Assigns the current for-in key (already in the accumulator) to the loop's
// target expression, which may be a variable or any kind of property access.
// For plain property stores the key value is preserved in the accumulator
// afterwards.
void BytecodeGenerator::VisitForInAssignment(Expression* expr) {
  DCHECK(expr->IsValidReferenceExpression());

  // Evaluate assignment starting with the value to be stored in the
  // accumulator.
  Property* property = expr->AsProperty();
  LhsKind assign_type = Property::GetAssignType(property);
  switch (assign_type) {
    case VARIABLE: {
      // Simple variable target: assign directly.
      VariableProxy* proxy = expr->AsVariableProxy();
      BuildVariableAssignment(proxy->var(), Token::ASSIGN,
                              proxy->hole_check_mode());
      break;
    }
    case NAMED_PROPERTY: {
      // obj.name = value. Save the value first since evaluating obj may
      // clobber the accumulator.
      RegisterAllocationScope register_scope(this);
      Register value = register_allocator()->NewRegister();
      builder()->StoreAccumulatorInRegister(value);
      Register object = VisitForRegisterValue(property->obj());
      const AstRawString* name =
          property->key()->AsLiteral()->AsRawPropertyName();
      builder()->LoadAccumulatorWithRegister(value);
      FeedbackSlot slot = GetCachedStoreICSlot(property->obj(), name);
      builder()->StoreNamedProperty(object, name, feedback_index(slot),
                                    language_mode());
      // Restore the value so the accumulator still holds the loop key.
      builder()->LoadAccumulatorWithRegister(value);
      break;
    }
    case KEYED_PROPERTY: {
      // obj[key] = value. Save the value across evaluation of obj and key.
      RegisterAllocationScope register_scope(this);
      Register value = register_allocator()->NewRegister();
      builder()->StoreAccumulatorInRegister(value);
      Register object = VisitForRegisterValue(property->obj());
      Register key = VisitForRegisterValue(property->key());
      builder()->LoadAccumulatorWithRegister(value);
      FeedbackSlot slot = feedback_spec()->AddKeyedStoreICSlot(language_mode());
      builder()->StoreKeyedProperty(object, key, feedback_index(slot),
                                    language_mode());
      // Restore the value so the accumulator still holds the loop key.
      builder()->LoadAccumulatorWithRegister(value);
      break;
    }
    case NAMED_SUPER_PROPERTY: {
      // super.name = value: runtime call with (this, home_object, name,
      // value).
      RegisterAllocationScope register_scope(this);
      RegisterList args = register_allocator()->NewRegisterList(4);
      builder()->StoreAccumulatorInRegister(args[3]);
      SuperPropertyReference* super_property =
          property->obj()->AsSuperPropertyReference();
      VisitForRegisterValue(super_property->this_var(), args[0]);
      VisitForRegisterValue(super_property->home_object(), args[1]);
      builder()
          ->LoadLiteral(property->key()->AsLiteral()->AsRawPropertyName())
          .StoreAccumulatorInRegister(args[2])
          .CallRuntime(StoreToSuperRuntimeId(), args);
      break;
    }
    case KEYED_SUPER_PROPERTY: {
      // super[key] = value: runtime call with (this, home_object, key,
      // value).
      RegisterAllocationScope register_scope(this);
      RegisterList args = register_allocator()->NewRegisterList(4);
      builder()->StoreAccumulatorInRegister(args[3]);
      SuperPropertyReference* super_property =
          property->obj()->AsSuperPropertyReference();
      VisitForRegisterValue(super_property->this_var(), args[0]);
      VisitForRegisterValue(super_property->home_object(), args[1]);
      VisitForRegisterValue(property->key(), args[2]);
      builder()->CallRuntime(StoreKeyedToSuperRuntimeId(), args);
      break;
    }
  }
}
1620 
// Emits a for-in loop using the ForIn{Enumerate,Prepare,Continue,Next,Step}
// bytecodes. Null/undefined subjects are skipped entirely, matching
// for-in's semantics of producing no iterations.
void BytecodeGenerator::VisitForInStatement(ForInStatement* stmt) {
  if (stmt->subject()->IsNullLiteral() ||
      stmt->subject()->IsUndefinedLiteral()) {
    // ForIn generates lots of code, skip if it wouldn't produce any effects.
    return;
  }

  BytecodeLabel subject_null_label, subject_undefined_label;
  FeedbackSlot slot = feedback_spec()->AddForInSlot();

  // Prepare the state for executing ForIn.
  builder()->SetExpressionAsStatementPosition(stmt->subject());
  VisitForAccumulatorValue(stmt->subject());
  // Runtime null/undefined subjects also produce zero iterations.
  builder()->JumpIfUndefined(&subject_undefined_label);
  builder()->JumpIfNull(&subject_null_label);
  Register receiver = register_allocator()->NewRegister();
  builder()->ToObject(receiver);

  // Used as kRegTriple and kRegPair in ForInPrepare and ForInNext.
  RegisterList triple = register_allocator()->NewRegisterList(3);
  Register cache_length = triple[2];
  builder()->ForInEnumerate(receiver);
  builder()->ForInPrepare(triple, feedback_index(slot));

  // Set up loop counter.
  Register index = register_allocator()->NewRegister();
  builder()->LoadLiteral(Smi::zero());
  builder()->StoreAccumulatorInRegister(index);

  // The loop.
  {
    LoopBuilder loop_builder(builder(), block_coverage_builder_, stmt);
    loop_builder.LoopHeader();
    builder()->SetExpressionAsStatementPosition(stmt->each());
    // Exit when the index reaches the enumeration cache length.
    builder()->ForInContinue(index, cache_length);
    loop_builder.BreakIfFalse(ToBooleanMode::kAlreadyBoolean);
    builder()->ForInNext(receiver, index, triple.Truncate(2),
                         feedback_index(slot));
    // ForInNext yields undefined for properties deleted during iteration;
    // skip those.
    loop_builder.ContinueIfUndefined();
    VisitForInAssignment(stmt->each());
    VisitIterationBody(stmt, &loop_builder);
    builder()->ForInStep(index);
    builder()->StoreAccumulatorInRegister(index);
    loop_builder.JumpToHeader(loop_depth_);
  }
  builder()->Bind(&subject_null_label);
  builder()->Bind(&subject_undefined_label);
}
1669 
// Emits a for-of loop. The iterator protocol steps (getting the iterator,
// calling next(), checking done, extracting value) are pre-desugared into
// the statement's sub-expressions by the parser.
void BytecodeGenerator::VisitForOfStatement(ForOfStatement* stmt) {
  LoopBuilder loop_builder(builder(), block_coverage_builder_, stmt);

  // Acquire the iterator and its next method before entering the loop.
  builder()->SetExpressionAsStatementPosition(stmt->assign_iterator());
  VisitForEffect(stmt->assign_iterator());
  VisitForEffect(stmt->assign_next());

  loop_builder.LoopHeader();
  // Call next() and break out of the loop when result.done is truthy.
  builder()->SetExpressionAsStatementPosition(stmt->next_result());
  VisitForEffect(stmt->next_result());
  TypeHint type_hint = VisitForAccumulatorValue(stmt->result_done());
  loop_builder.BreakIfTrue(ToBooleanModeFromTypeHint(type_hint));

  // Assign result.value to the loop variable and run the body.
  VisitForEffect(stmt->assign_each());
  VisitIterationBody(stmt, &loop_builder);
  loop_builder.JumpToHeader(loop_depth_);
}
1687 
// Emits a try/catch: the try block runs inside a handler-simulating control
// scope; on entry to the catch, the saved context is restored, the pending
// message is optionally cleared, and the catch binding (if any) gets its own
// catch context.
void BytecodeGenerator::VisitTryCatchStatement(TryCatchStatement* stmt) {
  // Update catch prediction tracking. The updated catch_prediction value
  // lasts until the end of the try_block in the AST node, and does not apply
  // to the catch_block.
  HandlerTable::CatchPrediction outer_catch_prediction = catch_prediction();
  set_catch_prediction(stmt->GetCatchPrediction(outer_catch_prediction));

  TryCatchBuilder try_control_builder(builder(), block_coverage_builder_, stmt,
                                      catch_prediction());

  // Preserve the context in a dedicated register, so that it can be restored
  // when the handler is entered by the stack-unwinding machinery.
  // TODO(mstarzinger): Be smarter about register allocation.
  Register context = register_allocator()->NewRegister();
  builder()->MoveRegister(Register::current_context(), context);

  // Evaluate the try-block inside a control scope. This simulates a handler
  // that is intercepting 'throw' control commands.
  try_control_builder.BeginTry(context);
  {
    ControlScopeForTryCatch scope(this, &try_control_builder);
    Visit(stmt->try_block());
    // Restore the outer prediction for code following the try block.
    set_catch_prediction(outer_catch_prediction);
  }
  try_control_builder.EndTry();

  if (stmt->scope()) {
    // Create a catch scope that binds the exception.
    BuildNewLocalCatchContext(stmt->scope());
    builder()->StoreAccumulatorInRegister(context);
  }

  // If requested, clear message object as we enter the catch block.
  if (stmt->ShouldClearPendingException(outer_catch_prediction)) {
    builder()->LoadTheHole().SetPendingMessage();
  }

  // Load the catch context into the accumulator.
  builder()->LoadAccumulatorWithRegister(context);

  // Evaluate the catch-block.
  if (stmt->scope()) {
    VisitInScope(stmt->catch_block(), stmt->scope());
  } else {
    VisitBlock(stmt->catch_block());
  }
  try_control_builder.EndCatch();
}
1736 
// Generates bytecode for try-finally. All exits from the try-block are
// funneled through the finally-block via a token/result pair recorded in
// {commands}; after the finally-block runs, a dynamic dispatch resumes the
// original control transfer.
void BytecodeGenerator::VisitTryFinallyStatement(TryFinallyStatement* stmt) {
  // We can't know whether the finally block will override ("catch") an
  // exception thrown in the try block, so we just adopt the outer prediction.
  TryFinallyBuilder try_control_builder(builder(), block_coverage_builder_,
                                        stmt, catch_prediction());

  // We keep a record of all paths that enter the finally-block to be able to
  // dispatch to the correct continuation point after the statements in the
  // finally-block have been evaluated.
  //
  // The try-finally construct can enter the finally-block in three ways:
  // 1. By exiting the try-block normally, falling through at the end.
  // 2. By exiting the try-block with a function-local control flow transfer
  //    (i.e. through break/continue/return statements).
  // 3. By exiting the try-block with a thrown exception.
  //
  // The result register semantics depend on how the block was entered:
  //  - ReturnStatement: It represents the return value being returned.
  //  - ThrowStatement: It represents the exception being thrown.
  //  - BreakStatement/ContinueStatement: Undefined and not used.
  //  - Falling through into finally-block: Undefined and not used.
  Register token = register_allocator()->NewRegister();
  Register result = register_allocator()->NewRegister();
  ControlScope::DeferredCommands commands(this, token, result);

  // Preserve the context in a dedicated register, so that it can be restored
  // when the handler is entered by the stack-unwinding machinery.
  // TODO(mstarzinger): Be smarter about register allocation.
  Register context = register_allocator()->NewRegister();
  builder()->MoveRegister(Register::current_context(), context);

  // Evaluate the try-block inside a control scope. This simulates a handler
  // that is intercepting all control commands.
  try_control_builder.BeginTry(context);
  {
    ControlScopeForTryFinally scope(this, &try_control_builder, &commands);
    Visit(stmt->try_block());
  }
  try_control_builder.EndTry();

  // Record fall-through and exception cases.
  commands.RecordFallThroughPath();
  try_control_builder.LeaveTry();
  try_control_builder.BeginHandler();
  commands.RecordHandlerReThrowPath();

  // Pending message object is saved on entry.
  try_control_builder.BeginFinally();
  // The context register is no longer needed at this point, so it can hold
  // the saved pending-message object instead.
  Register message = context;  // Reuse register.

  // Clear message object as we enter the finally block.
  builder()->LoadTheHole().SetPendingMessage().StoreAccumulatorInRegister(
      message);

  // Evaluate the finally-block.
  Visit(stmt->finally_block());
  try_control_builder.EndFinally();

  // Pending message object is restored on exit.
  builder()->LoadAccumulatorWithRegister(message).SetPendingMessage();

  // Dynamic dispatch after the finally-block.
  commands.ApplyDeferredCommands();
}
1801 
1802 void BytecodeGenerator::VisitDebuggerStatement(DebuggerStatement* stmt) {
1803  builder()->SetStatementPosition(stmt);
1804  builder()->Debugger();
1805 }
1806 
1807 void BytecodeGenerator::VisitFunctionLiteral(FunctionLiteral* expr) {
1808  DCHECK(expr->scope()->outer_scope() == current_scope());
1809  uint8_t flags = CreateClosureFlags::Encode(
1810  expr->pretenure(), closure_scope()->is_function_scope());
1811  size_t entry = builder()->AllocateDeferredConstantPoolEntry();
1812  FeedbackSlot slot = GetCachedCreateClosureSlot(expr);
1813  builder()->CreateClosure(entry, feedback_index(slot), flags);
1814  function_literals_.push_back(std::make_pair(expr, entry));
1815  AddToEagerLiteralsIfEager(expr);
1816 }
1817 
1818 void BytecodeGenerator::AddToEagerLiteralsIfEager(FunctionLiteral* literal) {
1819  if (eager_inner_literals_ && literal->ShouldEagerCompile()) {
1820  DCHECK(!IsInEagerLiterals(literal, *eager_inner_literals_));
1821  eager_inner_literals_->push_back(literal);
1822  }
1823 }
1824 
1825 bool BytecodeGenerator::ShouldOptimizeAsOneShot() const {
1826  if (!FLAG_enable_one_shot_optimization) return false;
1827 
1828  if (loop_depth_ > 0) return false;
1829 
1830  // A non-top-level iife is likely to be executed multiple times and so
1831  // shouldn`t be optimized as one-shot.
1832  bool is_toplevel_iife = info()->literal()->is_iife() &&
1833  current_scope()->outer_scope()->is_script_scope();
1834  return info()->literal()->is_toplevel() || is_toplevel_iife;
1835 }
1836 
// Generates bytecode that materializes a class literal: evaluates the
// extends clause and constructor, collects computed keys and method values
// into a growable argument list, and calls Runtime::kDefineClass. If {name}
// is a valid register, its value is installed as the class's "name" property
// before the static fields initializer runs. Leaves the class constructor
// in the accumulator.
void BytecodeGenerator::BuildClassLiteral(ClassLiteral* expr, Register name) {
  // Reserve a constant pool entry for the class boilerplate; it is filled in
  // during finalization (see class_literals_).
  size_t class_boilerplate_entry =
      builder()->AllocateDeferredConstantPoolEntry();
  class_literals_.push_back(std::make_pair(expr, class_boilerplate_entry));

  VisitDeclarations(expr->scope()->declarations());
  Register class_constructor = register_allocator()->NewRegister();

  {
    RegisterAllocationScope register_scope(this);
    // Arguments for Runtime::kDefineClass: boilerplate, constructor,
    // super class, then dynamically-collected key/value pairs.
    RegisterList args = register_allocator()->NewGrowableRegisterList();

    Register class_boilerplate = register_allocator()->GrowRegisterList(&args);
    Register class_constructor_in_args =
        register_allocator()->GrowRegisterList(&args);
    Register super_class = register_allocator()->GrowRegisterList(&args);
    DCHECK_EQ(ClassBoilerplate::kFirstDynamicArgumentIndex,
              args.register_count());

    // The extends clause evaluates to the-hole when absent.
    VisitForAccumulatorValueOrTheHole(expr->extends());
    builder()->StoreAccumulatorInRegister(super_class);

    VisitFunctionLiteral(expr->constructor());
    builder()
        ->StoreAccumulatorInRegister(class_constructor)
        .MoveRegister(class_constructor, class_constructor_in_args)
        .LoadConstantPoolEntry(class_boilerplate_entry)
        .StoreAccumulatorInRegister(class_boilerplate);

    // Create computed names and method values nodes to store into the literal.
    for (int i = 0; i < expr->properties()->length(); i++) {
      ClassLiteral::Property* property = expr->properties()->at(i);
      if (property->is_computed_name()) {
        // Private fields never have computed names.
        DCHECK_IMPLIES(property->kind() == ClassLiteral::Property::FIELD,
                       !property->is_private());
        Register key = register_allocator()->GrowRegisterList(&args);

        builder()->SetExpressionAsStatementPosition(property->key());
        BuildLoadPropertyKey(property, key);
        if (property->is_static()) {
          // The static prototype property is read only. We handle the non
          // computed property name case in the parser. Since this is the only
          // case where we need to check for an own read only property we
          // special case this so we do not need to do this for every property.

          FeedbackSlot slot = GetDummyCompareICSlot();
          BytecodeLabel done;
          builder()
              ->LoadLiteral(ast_string_constants()->prototype_string())
              .CompareOperation(Token::Value::EQ_STRICT, key,
                                feedback_index(slot))
              .JumpIfFalse(ToBooleanMode::kAlreadyBoolean, &done)
              .CallRuntime(Runtime::kThrowStaticPrototypeError)
              .Bind(&done);
        }

        if (property->kind() == ClassLiteral::Property::FIELD &&
            !property->is_private()) {
          // Initialize field's name variable with the computed name.
          DCHECK_NOT_NULL(property->computed_name_var());
          builder()->LoadAccumulatorWithRegister(key);
          BuildVariableAssignment(property->computed_name_var(), Token::INIT,
                                  HoleCheckMode::kElided);
        }
      }

      if (property->kind() == ClassLiteral::Property::FIELD) {
        if (property->is_private()) {
          // Create a fresh private-name symbol and bind it to the field's
          // private-name variable.
          builder()->CallRuntime(Runtime::kCreatePrivateNameSymbol);
          DCHECK_NOT_NULL(property->private_name_var());
          BuildVariableAssignment(property->private_name_var(), Token::INIT,
                                  HoleCheckMode::kElided);
        }
        // We don't compute field's value here, but instead do it in the
        // initializer function.
        continue;
      }

      Register value = register_allocator()->GrowRegisterList(&args);
      VisitForRegisterValue(property->value(), value);
    }

    builder()->CallRuntime(Runtime::kDefineClass, args);
  }
  // The result of kDefineClass (stored here as the prototype) is in the
  // accumulator.
  Register prototype = register_allocator()->NewRegister();
  builder()->StoreAccumulatorInRegister(prototype);

  // Assign to class variable.
  if (expr->class_variable() != nullptr) {
    DCHECK(expr->class_variable()->IsStackLocal() ||
           expr->class_variable()->IsContextSlot());
    builder()->LoadAccumulatorWithRegister(class_constructor);
    BuildVariableAssignment(expr->class_variable(), Token::INIT,
                            HoleCheckMode::kElided);
  }

  if (expr->instance_members_initializer_function() != nullptr) {
    Register initializer =
        VisitForRegisterValue(expr->instance_members_initializer_function());

    if (FunctionLiteral::NeedsHomeObject(
            expr->instance_members_initializer_function())) {
      FeedbackSlot slot = feedback_spec()->AddStoreICSlot(language_mode());
      builder()->LoadAccumulatorWithRegister(prototype).StoreHomeObjectProperty(
          initializer, feedback_index(slot), language_mode());
    }

    // Attach the initializer to the constructor so instances can run it.
    FeedbackSlot slot = feedback_spec()->AddStoreICSlot(language_mode());
    builder()
        ->LoadAccumulatorWithRegister(initializer)
        .StoreClassFieldsInitializer(class_constructor, feedback_index(slot))
        .LoadAccumulatorWithRegister(class_constructor);
  }

  if (expr->static_fields_initializer() != nullptr) {
    // TODO(gsathya): This can be optimized away to be a part of the
    // class boilerplate in the future. The name argument can be
    // passed to the DefineClass runtime function and have it set
    // there.
    if (name.is_valid()) {
      // Set the "name" property on the constructor before the static fields
      // initializer runs, so the initializer can observe it.
      Register key = register_allocator()->NewRegister();
      builder()
          ->LoadLiteral(ast_string_constants()->name_string())
          .StoreAccumulatorInRegister(key);

      DataPropertyInLiteralFlags data_property_flags =
          DataPropertyInLiteralFlag::kNoFlags;
      FeedbackSlot slot =
          feedback_spec()->AddStoreDataPropertyInLiteralICSlot();
      builder()->LoadAccumulatorWithRegister(name).StoreDataPropertyInLiteral(
          class_constructor, key, data_property_flags, feedback_index(slot));
    }

    RegisterList args = register_allocator()->NewRegisterList(1);
    Register initializer =
        VisitForRegisterValue(expr->static_fields_initializer());

    if (FunctionLiteral::NeedsHomeObject(expr->static_fields_initializer())) {
      FeedbackSlot slot = feedback_spec()->AddStoreICSlot(language_mode());
      builder()
          ->LoadAccumulatorWithRegister(class_constructor)
          .StoreHomeObjectProperty(initializer, feedback_index(slot),
                                   language_mode());
    }

    // Call the static fields initializer with the constructor as receiver.
    builder()
        ->MoveRegister(class_constructor, args[0])
        .CallProperty(initializer, args,
                      feedback_index(feedback_spec()->AddCallICSlot()));
  }
  builder()->LoadAccumulatorWithRegister(class_constructor);
}
1989 
1990 void BytecodeGenerator::VisitClassLiteral(ClassLiteral* expr) {
1991  VisitClassLiteral(expr, Register::invalid_value());
1992 }
1993 
1994 void BytecodeGenerator::VisitClassLiteral(ClassLiteral* expr, Register name) {
1995  CurrentScope current_scope(this, expr->scope());
1996  DCHECK_NOT_NULL(expr->scope());
1997  if (expr->scope()->NeedsContext()) {
1998  BuildNewLocalBlockContext(expr->scope());
1999  ContextScope scope(this, expr->scope());
2000  BuildClassLiteral(expr, name);
2001  } else {
2002  BuildClassLiteral(expr, name);
2003  }
2004 }
2005 
// Generates the body of the synthetic class-members initializer function:
// for each field, loads its key, evaluates its value, and defines it on the
// receiver (the constructor is passed as the receiver here).
void BytecodeGenerator::VisitInitializeClassMembersStatement(
    InitializeClassMembersStatement* stmt) {
  RegisterList args = register_allocator()->NewRegisterList(3);
  Register constructor = args[0], key = args[1], value = args[2];
  builder()->MoveRegister(builder()->Receiver(), constructor);

  for (int i = 0; i < stmt->fields()->length(); i++) {
    ClassLiteral::Property* property = stmt->fields()->at(i);

    if (property->is_computed_name()) {
      DCHECK_EQ(property->kind(), ClassLiteral::Property::FIELD);
      DCHECK(!property->is_private());
      Variable* var = property->computed_name_var();
      DCHECK_NOT_NULL(var);
      // The computed name is already evaluated and stored in a
      // variable at class definition time.
      BuildVariableLoad(var, HoleCheckMode::kElided);
      builder()->StoreAccumulatorInRegister(key);
    } else if (property->kind() == ClassLiteral::Property::FIELD &&
               property->is_private()) {
      // Private fields use the private-name symbol created at class
      // definition time as their key.
      Variable* private_name_var = property->private_name_var();
      DCHECK_NOT_NULL(private_name_var);
      BuildVariableLoad(private_name_var, HoleCheckMode::kElided);
      builder()->StoreAccumulatorInRegister(key);
    } else {
      BuildLoadPropertyKey(property, key);
    }

    builder()->SetExpressionAsStatementPosition(property->value());
    VisitForRegisterValue(property->value(), value);
    VisitSetHomeObject(value, constructor, property);

    // Public fields become ordinary data properties; private fields go
    // through the dedicated AddPrivateField runtime function.
    Runtime::FunctionId function_id =
        property->kind() == ClassLiteral::Property::FIELD &&
                !property->is_private()
            ? Runtime::kCreateDataProperty
            : Runtime::kAddPrivateField;
    builder()->CallRuntime(function_id, args);
  }
}
2046 
2047 void BytecodeGenerator::BuildInstanceMemberInitialization(Register constructor,
2048  Register instance) {
2049  RegisterList args = register_allocator()->NewRegisterList(1);
2050  Register initializer = register_allocator()->NewRegister();
2051 
2052  FeedbackSlot slot = feedback_spec()->AddLoadICSlot();
2053  BytecodeLabel done;
2054 
2055  builder()
2056  ->LoadClassFieldsInitializer(constructor, feedback_index(slot))
2057  // TODO(gsathya): This jump can be elided for the base
2058  // constructor and derived constructor. This is only required
2059  // when called from an arrow function.
2060  .JumpIfUndefined(&done)
2061  .StoreAccumulatorInRegister(initializer)
2062  .MoveRegister(instance, args[0])
2063  .CallProperty(initializer, args,
2064  feedback_index(feedback_spec()->AddCallICSlot()))
2065  .Bind(&done);
2066 }
2067 
2068 void BytecodeGenerator::VisitNativeFunctionLiteral(
2069  NativeFunctionLiteral* expr) {
2070  size_t entry = builder()->AllocateDeferredConstantPoolEntry();
2071  FeedbackSlot slot = feedback_spec()->AddCreateClosureSlot();
2072  builder()->CreateClosure(entry, feedback_index(slot), NOT_TENURED);
2073  native_function_literals_.push_back(std::make_pair(expr, entry));
2074 }
2075 
void BytecodeGenerator::VisitDoExpression(DoExpression* expr) {
  // Evaluate the do-expression's block, then read its result variable proxy;
  // the proxy's value becomes the value of the whole expression.
  VisitBlock(expr->block());
  VisitVariableProxy(expr->result());
}
2080 
// Generates bytecode for the ternary operator (cond ? a : b), constant-
// folding the branch when the condition's boolean value is statically known.
void BytecodeGenerator::VisitConditional(Conditional* expr) {
  ConditionalControlFlowBuilder conditional_builder(
      builder(), block_coverage_builder_, expr);

  if (expr->condition()->ToBooleanIsTrue()) {
    // Generate then block unconditionally as always true.
    conditional_builder.Then();
    VisitForAccumulatorValue(expr->then_expression());
  } else if (expr->condition()->ToBooleanIsFalse()) {
    // Generate else block unconditionally if it exists.
    conditional_builder.Else();
    VisitForAccumulatorValue(expr->else_expression());
  } else {
    // Condition is not statically known: emit a test that branches to the
    // then/else labels, falling through into the then-part.
    VisitForTest(expr->condition(), conditional_builder.then_labels(),
                 conditional_builder.else_labels(), TestFallthrough::kThen);

    conditional_builder.Then();
    VisitForAccumulatorValue(expr->then_expression());
    // Skip over the else-part once the then-part has been evaluated.
    conditional_builder.JumpToEnd();

    conditional_builder.Else();
    VisitForAccumulatorValue(expr->else_expression());
  }
}
2105 
2106 void BytecodeGenerator::VisitLiteral(Literal* expr) {
2107  if (execution_result()->IsEffect()) return;
2108  switch (expr->type()) {
2109  case Literal::kSmi:
2110  builder()->LoadLiteral(expr->AsSmiLiteral());
2111  break;
2112  case Literal::kHeapNumber:
2113  builder()->LoadLiteral(expr->AsNumber());
2114  break;
2115  case Literal::kUndefined:
2116  builder()->LoadUndefined();
2117  break;
2118  case Literal::kBoolean:
2119  builder()->LoadBoolean(expr->ToBooleanIsTrue());
2120  execution_result()->SetResultIsBoolean();
2121  break;
2122  case Literal::kNull:
2123  builder()->LoadNull();
2124  break;
2125  case Literal::kTheHole:
2126  builder()->LoadTheHole();
2127  break;
2128  case Literal::kString:
2129  builder()->LoadLiteral(expr->AsRawString());
2130  execution_result()->SetResultIsString();
2131  break;
2132  case Literal::kSymbol:
2133  builder()->LoadLiteral(expr->AsSymbol());
2134  break;
2135  case Literal::kBigInt:
2136  builder()->LoadLiteral(expr->AsBigInt());
2137  break;
2138  }
2139 }
2140 
2141 void BytecodeGenerator::VisitRegExpLiteral(RegExpLiteral* expr) {
2142  // Materialize a regular expression literal.
2143  builder()->CreateRegExpLiteral(
2144  expr->raw_pattern(), feedback_index(feedback_spec()->AddLiteralSlot()),
2145  expr->flags());
2146 }
2147 
2148 void BytecodeGenerator::BuildCreateObjectLiteral(Register literal,
2149  uint8_t flags, size_t entry) {
2150  if (ShouldOptimizeAsOneShot()) {
2151  RegisterList args = register_allocator()->NewRegisterList(2);
2152  builder()
2153  ->LoadConstantPoolEntry(entry)
2154  .StoreAccumulatorInRegister(args[0])
2155  .LoadLiteral(Smi::FromInt(flags))
2156  .StoreAccumulatorInRegister(args[1])
2157  .CallRuntime(Runtime::kCreateObjectLiteralWithoutAllocationSite, args)
2158  .StoreAccumulatorInRegister(literal);
2159 
2160  } else {
2161  // TODO(cbruni): Directly generate runtime call for literals we cannot
2162  // optimize once the CreateShallowObjectLiteral stub is in sync with the TF
2163  // optimizations.
2164  int literal_index = feedback_index(feedback_spec()->AddLiteralSlot());
2165  builder()
2166  ->CreateObjectLiteral(entry, literal_index, flags)
2167  .StoreAccumulatorInRegister(literal);
2168  }
2169 }
2170 
// Generates bytecode for an object literal. The literal is built in phases:
// (1) create the initial object (empty fast path, clone-object spread fast
// path, or boilerplate clone), (2) store the "static" properties that precede
// the first computed name, (3) define accessor pairs, and (4) define the
// remaining "dynamic" properties in insertion order. Leaves the resulting
// object in the accumulator.
void BytecodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
  expr->InitDepthAndFlags();

  // Fast path for the empty object literal which doesn't need an
  // AllocationSite.
  if (expr->IsEmptyObjectLiteral()) {
    DCHECK(expr->IsFastCloningSupported());
    builder()->CreateEmptyObjectLiteral();
    return;
  }

  // Deep-copy the literal boilerplate.
  uint8_t flags = CreateObjectLiteralFlags::Encode(
      expr->ComputeFlags(), expr->IsFastCloningSupported());

  Register literal = register_allocator()->NewRegister();

  // Create literal object.
  int property_index = 0;
  bool clone_object_spread =
      expr->properties()->first()->kind() == ObjectLiteral::Property::SPREAD;
  if (clone_object_spread) {
    // Avoid the slow path for spreads in the following common cases:
    //   1) `let obj = { ...source }`
    //   2) `let obj = { ...source, override: 1 }`
    //   3) `let obj = { ...source, ...overrides }`
    RegisterAllocationScope register_scope(this);
    Expression* property = expr->properties()->first()->value();
    Register from_value = VisitForRegisterValue(property);

    // undefined/null spread sources clone to an empty object; anything else
    // is first converted with ToObject.
    BytecodeLabels clone_object(zone());
    builder()->JumpIfUndefined(clone_object.New());
    builder()->JumpIfNull(clone_object.New());
    builder()->ToObject(from_value);

    clone_object.Bind(builder());
    int clone_index = feedback_index(feedback_spec()->AddCloneObjectSlot());
    builder()->CloneObject(from_value, flags, clone_index);
    builder()->StoreAccumulatorInRegister(literal);
    property_index++;
  } else {
    size_t entry;
    // If constant properties is an empty fixed array, use a cached empty fixed
    // array to ensure it's only added to the constant pool once.
    if (expr->properties_count() == 0) {
      entry = builder()->EmptyObjectBoilerplateDescriptionConstantPoolEntry();
    } else {
      entry = builder()->AllocateDeferredConstantPoolEntry();
      object_literals_.push_back(std::make_pair(expr, entry));
    }
    BuildCreateObjectLiteral(literal, flags, entry);
  }

  // Store computed values into the literal. This loop covers the "static"
  // part: it stops at the first computed name (handled by the loop below).
  AccessorTable accessor_table(zone());
  for (; property_index < expr->properties()->length(); property_index++) {
    ObjectLiteral::Property* property = expr->properties()->at(property_index);
    if (property->is_computed_name()) break;
    // Compile-time values are already part of the boilerplate (unless we
    // took the clone-object-spread path, which has no boilerplate).
    if (!clone_object_spread && property->IsCompileTimeValue()) continue;

    RegisterAllocationScope inner_register_scope(this);
    Literal* key = property->key()->AsLiteral();
    switch (property->kind()) {
      case ObjectLiteral::Property::SPREAD:
        UNREACHABLE();
      case ObjectLiteral::Property::CONSTANT:
      case ObjectLiteral::Property::MATERIALIZED_LITERAL:
        DCHECK(clone_object_spread || !property->value()->IsCompileTimeValue());
        V8_FALLTHROUGH;
      case ObjectLiteral::Property::COMPUTED: {
        // It is safe to use [[Put]] here because the boilerplate already
        // contains computed properties with an uninitialized value.
        if (key->IsStringLiteral()) {
          DCHECK(key->IsPropertyName());
          if (property->emit_store()) {
            builder()->SetExpressionPosition(property->value());
            VisitForAccumulatorValue(property->value());
            FeedbackSlot slot = feedback_spec()->AddStoreOwnICSlot();
            if (FunctionLiteral::NeedsHomeObject(property->value())) {
              // Preserve the value in a register so the home object can be
              // set on it after the store.
              RegisterAllocationScope register_scope(this);
              Register value = register_allocator()->NewRegister();
              builder()->StoreAccumulatorInRegister(value);
              builder()->StoreNamedOwnProperty(
                  literal, key->AsRawPropertyName(), feedback_index(slot));
              VisitSetHomeObject(value, literal, property);
            } else {
              builder()->StoreNamedOwnProperty(
                  literal, key->AsRawPropertyName(), feedback_index(slot));
            }
          } else {
            // The value is still evaluated for its side effects even when the
            // store itself is elided.
            builder()->SetExpressionPosition(property->value());
            VisitForEffect(property->value());
          }
        } else {
          // Non-string key: go through the SetKeyedProperty runtime call.
          RegisterList args = register_allocator()->NewRegisterList(4);

          builder()->MoveRegister(literal, args[0]);
          builder()->SetExpressionPosition(property->key());
          VisitForRegisterValue(property->key(), args[1]);
          builder()->SetExpressionPosition(property->value());
          VisitForRegisterValue(property->value(), args[2]);
          if (property->emit_store()) {
            builder()
                ->LoadLiteral(Smi::FromEnum(LanguageMode::kSloppy))
                .StoreAccumulatorInRegister(args[3])
                .CallRuntime(Runtime::kSetKeyedProperty, args);
            Register value = args[2];
            VisitSetHomeObject(value, literal, property);
          }
        }
        break;
      }
      case ObjectLiteral::Property::PROTOTYPE: {
        // __proto__:null is handled by CreateObjectLiteral.
        if (property->IsNullPrototype()) break;
        DCHECK(property->emit_store());
        DCHECK(!property->NeedsSetFunctionName());
        RegisterList args = register_allocator()->NewRegisterList(2);
        builder()->MoveRegister(literal, args[0]);
        builder()->SetExpressionPosition(property->value());
        VisitForRegisterValue(property->value(), args[1]);
        builder()->CallRuntime(Runtime::kInternalSetPrototype, args);
        break;
      }
      case ObjectLiteral::Property::GETTER:
        // Accessors are collected and defined pairwise below.
        if (property->emit_store()) {
          accessor_table.lookup(key)->second->getter = property;
        }
        break;
      case ObjectLiteral::Property::SETTER:
        if (property->emit_store()) {
          accessor_table.lookup(key)->second->setter = property;
        }
        break;
    }
  }

  // Define accessors, using only a single call to the runtime for each pair of
  // corresponding getters and setters.
  for (AccessorTable::Iterator it = accessor_table.begin();
       it != accessor_table.end(); ++it) {
    RegisterAllocationScope inner_register_scope(this);
    RegisterList args = register_allocator()->NewRegisterList(5);
    builder()->MoveRegister(literal, args[0]);
    VisitForRegisterValue(it->first, args[1]);
    VisitObjectLiteralAccessor(literal, it->second->getter, args[2]);
    VisitObjectLiteralAccessor(literal, it->second->setter, args[3]);
    builder()
        ->LoadLiteral(Smi::FromInt(NONE))
        .StoreAccumulatorInRegister(args[4])
        .CallRuntime(Runtime::kDefineAccessorPropertyUnchecked, args);
  }

  // Object literals have two parts. The "static" part on the left contains no
  // computed property names, and so we can compute its map ahead of time; see
  // Runtime_CreateObjectLiteralBoilerplate. The second "dynamic" part starts
  // with the first computed property name and continues with all properties to
  // its right. All the code from above initializes the static component of the
  // object literal, and arranges for the map of the result to reflect the
  // static order in which the keys appear. For the dynamic properties, we
  // compile them into a series of "SetOwnProperty" runtime calls. This will
  // preserve insertion order.
  for (; property_index < expr->properties()->length(); property_index++) {
    ObjectLiteral::Property* property = expr->properties()->at(property_index);
    RegisterAllocationScope inner_register_scope(this);

    if (property->IsPrototype()) {
      // __proto__:null is handled by CreateObjectLiteral.
      if (property->IsNullPrototype()) continue;
      DCHECK(property->emit_store());
      DCHECK(!property->NeedsSetFunctionName());
      RegisterList args = register_allocator()->NewRegisterList(2);
      builder()->MoveRegister(literal, args[0]);
      builder()->SetExpressionPosition(property->value());
      VisitForRegisterValue(property->value(), args[1]);
      builder()->CallRuntime(Runtime::kInternalSetPrototype, args);
      continue;
    }

    switch (property->kind()) {
      case ObjectLiteral::Property::CONSTANT:
      case ObjectLiteral::Property::COMPUTED:
      case ObjectLiteral::Property::MATERIALIZED_LITERAL: {
        Register key = register_allocator()->NewRegister();
        BuildLoadPropertyKey(property, key);
        builder()->SetExpressionPosition(property->value());
        Register value;

        // Static class fields require the name property to be set on
        // the class, meaning we can't wait until the
        // StoreDataPropertyInLiteral call later to set the name.
        if (property->value()->IsClassLiteral() &&
            property->value()->AsClassLiteral()->static_fields_initializer() !=
                nullptr) {
          value = register_allocator()->NewRegister();
          VisitClassLiteral(property->value()->AsClassLiteral(), key);
          builder()->StoreAccumulatorInRegister(value);
        } else {
          value = VisitForRegisterValue(property->value());
        }
        VisitSetHomeObject(value, literal, property);

        DataPropertyInLiteralFlags data_property_flags =
            DataPropertyInLiteralFlag::kNoFlags;
        if (property->NeedsSetFunctionName()) {
          data_property_flags |= DataPropertyInLiteralFlag::kSetFunctionName;
        }

        FeedbackSlot slot =
            feedback_spec()->AddStoreDataPropertyInLiteralICSlot();
        builder()
            ->LoadAccumulatorWithRegister(value)
            .StoreDataPropertyInLiteral(literal, key, data_property_flags,
                                        feedback_index(slot));
        break;
      }
      case ObjectLiteral::Property::GETTER:
      case ObjectLiteral::Property::SETTER: {
        // Dynamic accessors are defined one at a time (no pairing possible
        // once computed names are involved).
        RegisterList args = register_allocator()->NewRegisterList(4);
        builder()->MoveRegister(literal, args[0]);
        BuildLoadPropertyKey(property, args[1]);
        builder()->SetExpressionPosition(property->value());
        VisitForRegisterValue(property->value(), args[2]);
        VisitSetHomeObject(args[2], literal, property);
        builder()
            ->LoadLiteral(Smi::FromInt(NONE))
            .StoreAccumulatorInRegister(args[3]);
        Runtime::FunctionId function_id =
            property->kind() == ObjectLiteral::Property::GETTER
                ? Runtime::kDefineGetterPropertyUnchecked
                : Runtime::kDefineSetterPropertyUnchecked;
        builder()->CallRuntime(function_id, args);
        break;
      }
      case ObjectLiteral::Property::SPREAD: {
        RegisterList args = register_allocator()->NewRegisterList(2);
        builder()->MoveRegister(literal, args[0]);
        builder()->SetExpressionPosition(property->value());
        VisitForRegisterValue(property->value(), args[1]);
        builder()->CallRuntime(Runtime::kCopyDataProperties, args);
        break;
      }
      case ObjectLiteral::Property::PROTOTYPE:
        UNREACHABLE();  // Handled specially above.
        break;
    }
  }

  builder()->LoadAccumulatorWithRegister(literal);
}
2421 
// Generates bytecode that iterates {spread}'s expression and appends each
// produced value to {array}, starting at (and incrementing) {index}.
void BytecodeGenerator::BuildArrayLiteralSpread(Spread* spread, Register array,
                                                Register index,
                                                FeedbackSlot index_slot,
                                                FeedbackSlot element_slot) {
  RegisterAllocationScope register_scope(this);
  Register value = register_allocator()->NewRegister();

  builder()->SetExpressionAsStatementPosition(spread->expression());
  // Obtain the iterator for the spread source (sync iteration protocol).
  IteratorRecord iterator =
      BuildGetIteratorRecord(spread->expression(), IteratorType::kNormal);

  LoopBuilder loop_builder(builder(), nullptr, nullptr);
  loop_builder.LoopHeader();

  // Call the iterator's .next() method. Break from the loop if the `done`
  // property is truthy, otherwise load the value from the iterator result and
  // append the argument.
  BuildIteratorNext(iterator, value);
  builder()->LoadNamedProperty(
      value, ast_string_constants()->done_string(),
      feedback_index(feedback_spec()->AddLoadICSlot()));
  loop_builder.BreakIfTrue(ToBooleanMode::kConvertToBoolean);

  loop_builder.LoopBody();
  builder()
      // value = value.value
      ->LoadNamedProperty(value, ast_string_constants()->value_string(),
                          feedback_index(feedback_spec()->AddLoadICSlot()))
      .StoreAccumulatorInRegister(value)
      // array[index] = value
      .StoreInArrayLiteral(array, index, feedback_index(element_slot))
      // index++
      .LoadAccumulatorWithRegister(index)
      .UnaryOperation(Token::INC, feedback_index(index_slot))
      .StoreAccumulatorInRegister(index);
  loop_builder.BindContinueTarget();
  loop_builder.JumpToHeader(loop_depth_);
}
2460 
2461 void BytecodeGenerator::BuildCreateArrayLiteral(
2462  const ZonePtrList<Expression>* elements, ArrayLiteral* expr) {
2463  RegisterAllocationScope register_scope(this);
2464  Register index = register_allocator()->NewRegister();
2465  Register array = register_allocator()->NewRegister();
2466  SharedFeedbackSlot element_slot(feedback_spec(),
2467  FeedbackSlotKind::kStoreInArrayLiteral);
2468  ZonePtrList<Expression>::iterator current = elements->begin();
2469  ZonePtrList<Expression>::iterator end = elements->end();
2470  bool is_empty = elements->is_empty();
2471 
2472  if (!is_empty && (*current)->IsSpread()) {
2473  // If we have a leading spread, use CreateArrayFromIterable to create
2474  // an array from it and then add the remaining components to that array.
2475  VisitForAccumulatorValue(*current);
2476  builder()->CreateArrayFromIterable().StoreAccumulatorInRegister(array);
2477 
2478  if (++current != end) {
2479  // If there are remaning elements, prepare the index register that is
2480  // used for adding those elements. The next index is the length of the
2481  // newly created array.
2482  auto length = ast_string_constants()->length_string();
2483  int length_load_slot = feedback_index(feedback_spec()->AddLoadICSlot());
2484  builder()
2485  ->LoadNamedProperty(array, length, length_load_slot)
2486  .StoreAccumulatorInRegister(index);
2487  }
2488  } else if (expr != nullptr) {
2489  // There are some elements before the first (if any) spread, and we can
2490  // use a boilerplate when creating the initial array from those elements.
2491 
2492  // First, allocate a constant pool entry for the boilerplate that will
2493  // be created during finalization, and will contain all the constant
2494  // elements before the first spread. This also handle the empty array case
2495  // and one-shot optimization.
2496  uint8_t flags = CreateArrayLiteralFlags::Encode(
2497  expr->IsFastCloningSupported(), expr->ComputeFlags());
2498  bool optimize_as_one_shot = ShouldOptimizeAsOneShot();
2499  size_t entry;
2500  if (is_empty && optimize_as_one_shot) {
2501  entry = builder()->EmptyArrayBoilerplateDescriptionConstantPoolEntry();
2502  } else if (!is_empty) {
2503  entry = builder()->AllocateDeferredConstantPoolEntry();
2504  array_literals_.push_back(std::make_pair(expr, entry));
2505  }
2506 
2507  if (optimize_as_one_shot) {
2508  RegisterList args = register_allocator()->NewRegisterList(2);
2509  builder()
2510  ->LoadConstantPoolEntry(entry)
2511  .StoreAccumulatorInRegister(args[0])
2512  .LoadLiteral(Smi::FromInt(flags))
2513  .StoreAccumulatorInRegister(args[1])
2514  .CallRuntime(Runtime::kCreateArrayLiteralWithoutAllocationSite, args);
2515  } else if (is_empty) {
2516  // Empty array literal fast-path.
2517  int literal_index = feedback_index(feedback_spec()->AddLiteralSlot());
2518  DCHECK(expr->IsFastCloningSupported());
2519  builder()->CreateEmptyArrayLiteral(literal_index);
2520  } else {
2521  // Create array literal from boilerplate.
2522  int literal_index = feedback_index(feedback_spec()->AddLiteralSlot());
2523  builder()->CreateArrayLiteral(entry, literal_index, flags);
2524  }
2525  builder()->StoreAccumulatorInRegister(array);
2526 
2527  // Insert the missing non-constant elements, up until the first spread
2528  // index, into the initial array (the remaining elements will be inserted
2529  // below).
2530  DCHECK_EQ(current, elements->begin());
2531  ZonePtrList<Expression>::iterator first_spread_or_end =
2532  expr->first_spread_index() >= 0 ? current + expr->first_spread_index()
2533  : end;
2534  int array_index = 0;
2535  for (; current != first_spread_or_end; ++current, array_index++) {
2536  Expression* subexpr = *current;
2537  DCHECK(!subexpr->IsSpread());
2538  // Skip the constants.
2539  if (subexpr->IsCompileTimeValue()) continue;
2540 
2541  builder()
2542  ->LoadLiteral(Smi::FromInt(array_index))
2543  .StoreAccumulatorInRegister(index);
2544  VisitForAccumulatorValue(subexpr);
2545  builder()->StoreInArrayLiteral(array, index,
2546  feedback_index(element_slot.Get()));
2547  }
2548 
2549  if (current != end) {
2550  // If there are remaining elements, prepare the index register
2551  // to store the next element, which comes from the first spread.
2552  builder()->LoadLiteral(array_index).StoreAccumulatorInRegister(index);
2553  }
2554  } else {
2555  // In other cases, we prepare an empty array to be filled in below.
2556  DCHECK(!elements->is_empty());
2557  int literal_index = feedback_index(feedback_spec()->AddLiteralSlot());
2558  builder()
2559  ->CreateEmptyArrayLiteral(literal_index)
2560  .StoreAccumulatorInRegister(array);
2561  // Prepare the index for the first element.
2562  builder()->LoadLiteral(Smi::FromInt(0)).StoreAccumulatorInRegister(index);
2563  }
2564 
2565  // Now build insertions for the remaining elements from current to end.
2566  SharedFeedbackSlot index_slot(feedback_spec(), FeedbackSlotKind::kBinaryOp);
2567  SharedFeedbackSlot length_slot(
2568  feedback_spec(), feedback_spec()->GetStoreICSlot(LanguageMode::kStrict));
2569  for (; current != end; ++current) {
2570  Expression* subexpr = *current;
2571  if (subexpr->IsSpread()) {
2572  FeedbackSlot real_index_slot = index_slot.Get();
2573  BuildArrayLiteralSpread(subexpr->AsSpread(), array, index,
2574  real_index_slot, element_slot.Get());
2575  } else if (!subexpr->IsTheHoleLiteral()) {
2576  // literal[index++] = subexpr
2577  VisitForAccumulatorValue(subexpr);
2578  builder()
2579  ->StoreInArrayLiteral(array, index,
2580  feedback_index(element_slot.Get()))
2581  .LoadAccumulatorWithRegister(index);
2582  // Only increase the index if we are not the last element.
2583  if (current + 1 != end) {
2584  builder()
2585  ->UnaryOperation(Token::INC, feedback_index(index_slot.Get()))
2586  .StoreAccumulatorInRegister(index);
2587  }
2588  } else {
2589  // literal.length = ++index
2590  // length_slot is only used when there are holes.
2591  auto length = ast_string_constants()->length_string();
2592  builder()
2593  ->LoadAccumulatorWithRegister(index)
2594  .UnaryOperation(Token::INC, feedback_index(index_slot.Get()))
2595  .StoreAccumulatorInRegister(index)
2596  .StoreNamedProperty(array, length, feedback_index(length_slot.Get()),
2597  LanguageMode::kStrict);
2598  }
2599  }
2600 
2601  builder()->LoadAccumulatorWithRegister(array);
2602 }
2603 
// Visits an array literal expression (e.g. [a, b, ...c]) and emits bytecode
// that materializes the array, leaving the result in the accumulator.
void BytecodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
  // InitDepthAndFlags() must run first: BuildCreateArrayLiteral() reads the
  // literal's flags and fast-cloning support computed here.
  expr->InitDepthAndFlags();
  BuildCreateArrayLiteral(expr->values(), expr);
}
2608 
2609 void BytecodeGenerator::VisitStoreInArrayLiteral(StoreInArrayLiteral* expr) {
2610  builder()->SetExpressionAsStatementPosition(expr);
2611  RegisterAllocationScope register_scope(this);
2612  Register array = register_allocator()->NewRegister();
2613  Register index = register_allocator()->NewRegister();
2614  VisitForRegisterValue(expr->array(), array);
2615  VisitForRegisterValue(expr->index(), index);
2616  VisitForAccumulatorValue(expr->value());
2617  builder()->StoreInArrayLiteral(
2618  array, index,
2619  feedback_index(feedback_spec()->AddStoreInArrayLiteralICSlot()));
2620 }
2621 
// Loads the value of the variable referenced by {proxy} into the
// accumulator, recording the proxy's source position first.
void BytecodeGenerator::VisitVariableProxy(VariableProxy* proxy) {
  builder()->SetExpressionPosition(proxy);
  BuildVariableLoad(proxy->var(), proxy->hole_check_mode());
}
2626 
// Emits bytecode that loads {variable} into the accumulator, dispatching on
// where the variable is allocated (register, parameter, global, context
// slot, dynamic lookup, or module cell). When {hole_check_mode} requires it,
// a TDZ hole check is emitted after the load. {typeof_mode} selects the
// load-IC variant used for globals/lookups (the 'typeof' variant does not
// throw for undeclared names).
void BytecodeGenerator::BuildVariableLoad(Variable* variable,
                                          HoleCheckMode hole_check_mode,
                                          TypeofMode typeof_mode) {
  switch (variable->location()) {
    case VariableLocation::LOCAL: {
      Register source(builder()->Local(variable->index()));
      // We need to load the variable into the accumulator, even when in a
      // VisitForRegisterScope, in order to avoid register aliasing if
      // subsequent expressions assign to the same variable.
      builder()->LoadAccumulatorWithRegister(source);
      if (hole_check_mode == HoleCheckMode::kRequired) {
        BuildThrowIfHole(variable);
      }
      break;
    }
    case VariableLocation::PARAMETER: {
      Register source;
      if (variable->IsReceiver()) {
        // The receiver ('this') has a dedicated parameter register.
        source = builder()->Receiver();
      } else {
        source = builder()->Parameter(variable->index());
      }
      // We need to load the variable into the accumulator, even when in a
      // VisitForRegisterScope, in order to avoid register aliasing if
      // subsequent expressions assign to the same variable.
      builder()->LoadAccumulatorWithRegister(source);
      if (hole_check_mode == HoleCheckMode::kRequired) {
        BuildThrowIfHole(variable);
      }
      break;
    }
    case VariableLocation::UNALLOCATED: {
      // The global identifier "undefined" is immutable. Everything
      // else could be reassigned. For performance, we do a pointer comparison
      // rather than checking if the raw_name is really "undefined".
      if (variable->raw_name() == ast_string_constants()->undefined_string()) {
        builder()->LoadUndefined();
      } else {
        FeedbackSlot slot = GetCachedLoadGlobalICSlot(typeof_mode, variable);
        builder()->LoadGlobal(variable->raw_name(), feedback_index(slot),
                              typeof_mode);
      }
      break;
    }
    case VariableLocation::CONTEXT: {
      int depth = execution_context()->ContextChainDepth(variable->scope());
      ContextScope* context = execution_context()->Previous(depth);
      Register context_reg;
      if (context) {
        // A ContextScope at the target depth is still live, so its context
        // object is available in a register and can be addressed at depth 0.
        context_reg = context->reg();
        depth = 0;
      } else {
        context_reg = execution_context()->reg();
      }

      // Slots that are provably never reassigned can be loaded with the
      // immutable variant, which permits more aggressive optimization.
      BytecodeArrayBuilder::ContextSlotMutability immutable =
          (variable->maybe_assigned() == kNotAssigned)
              ? BytecodeArrayBuilder::kImmutableSlot
              : BytecodeArrayBuilder::kMutableSlot;

      builder()->LoadContextSlot(context_reg, variable->index(), depth,
                                 immutable);
      if (hole_check_mode == HoleCheckMode::kRequired) {
        BuildThrowIfHole(variable);
      }
      break;
    }
    case VariableLocation::LOOKUP: {
      // Dynamic lookup: the binding may be shadowed (e.g. by sloppy eval),
      // so resolution happens at runtime.
      switch (variable->mode()) {
        case VariableMode::kDynamicLocal: {
          Variable* local_variable = variable->local_if_not_shadowed();
          int depth =
              execution_context()->ContextChainDepth(local_variable->scope());
          builder()->LoadLookupContextSlot(variable->raw_name(), typeof_mode,
                                           local_variable->index(), depth);
          if (hole_check_mode == HoleCheckMode::kRequired) {
            BuildThrowIfHole(variable);
          }
          break;
        }
        case VariableMode::kDynamicGlobal: {
          int depth =
              current_scope()->ContextChainLengthUntilOutermostSloppyEval();
          FeedbackSlot slot = GetCachedLoadGlobalICSlot(typeof_mode, variable);
          builder()->LoadLookupGlobalSlot(variable->raw_name(), typeof_mode,
                                          feedback_index(slot), depth);
          break;
        }
        default:
          // Fully dynamic: walk the scope chain at runtime.
          builder()->LoadLookupSlot(variable->raw_name(), typeof_mode);
      }
      break;
    }
    case VariableLocation::MODULE: {
      int depth = execution_context()->ContextChainDepth(variable->scope());
      builder()->LoadModuleVariable(variable->index(), depth);
      if (hole_check_mode == HoleCheckMode::kRequired) {
        BuildThrowIfHole(variable);
      }
      break;
    }
  }
}
2730 
// Like BuildVariableLoad(), but wraps the load in a ValueResultScope so the
// result-tracking machinery knows the value is produced in the accumulator.
void BytecodeGenerator::BuildVariableLoadForAccumulatorValue(
    Variable* variable, HoleCheckMode hole_check_mode, TypeofMode typeof_mode) {
  ValueResultScope accumulator_result(this);
  BuildVariableLoad(variable, hole_check_mode, typeof_mode);
}
2736 
// Emits a return of the value currently in the accumulator, first emitting
// optional trace-exit and type-profile bookkeeping, then setting the return
// source position.
void BytecodeGenerator::BuildReturn(int source_position) {
  if (FLAG_trace) {
    RegisterAllocationScope register_scope(this);
    Register result = register_allocator()->NewRegister();
    // Runtime returns {result} value, preserving accumulator.
    builder()->StoreAccumulatorInRegister(result).CallRuntime(
        Runtime::kTraceExit, result);
  }
  if (info()->collect_type_profile()) {
    builder()->CollectTypeProfile(info()->literal()->return_position());
  }
  builder()->SetReturnPosition(source_position, info()->literal());
  builder()->Return();
}
2751 
2752 void BytecodeGenerator::BuildAsyncReturn(int source_position) {
2753  RegisterAllocationScope register_scope(this);
2754 
2755  if (IsAsyncGeneratorFunction(info()->literal()->kind())) {
2756  RegisterList args = register_allocator()->NewRegisterList(3);
2757  builder()
2758  ->MoveRegister(generator_object(), args[0]) // generator
2759  .StoreAccumulatorInRegister(args[1]) // value
2760  .LoadTrue()
2761  .StoreAccumulatorInRegister(args[2]) // done
2762  .CallRuntime(Runtime::kInlineAsyncGeneratorResolve, args);
2763  } else {
2764  DCHECK(IsAsyncFunction(info()->literal()->kind()));
2765  RegisterList args = register_allocator()->NewRegisterList(3);
2766  builder()
2767  ->MoveRegister(generator_object(), args[0]) // generator
2768  .StoreAccumulatorInRegister(args[1]) // value
2769  .LoadBoolean(info()->literal()->CanSuspend())
2770  .StoreAccumulatorInRegister(args[2]) // can_suspend
2771  .CallRuntime(Runtime::kInlineAsyncFunctionResolve, args);
2772  }
2773 
2774  BuildReturn(source_position);
2775 }
2776 
2777 void BytecodeGenerator::BuildReThrow() { builder()->ReThrow(); }
2778 
2779 void BytecodeGenerator::BuildThrowIfHole(Variable* variable) {
2780  if (variable->is_this()) {
2781  DCHECK(variable->mode() == VariableMode::kConst);
2782  builder()->ThrowSuperNotCalledIfHole();
2783  } else {
2784  builder()->ThrowReferenceErrorIfHole(variable->raw_name());
2785  }
2786 }
2787 
2788 void BytecodeGenerator::BuildHoleCheckForVariableAssignment(Variable* variable,
2789  Token::Value op) {
2790  if (variable->is_this() && variable->mode() == VariableMode::kConst &&
2791  op == Token::INIT) {
2792  // Perform an initialization check for 'this'. 'this' variable is the
2793  // only variable able to trigger bind operations outside the TDZ
2794  // via 'super' calls.
2795  builder()->ThrowSuperAlreadyCalledIfNotHole();
2796  } else {
2797  // Perform an initialization check for let/const declared variables.
2798  // E.g. let x = (x = 20); is not allowed.
2799  DCHECK(IsLexicalVariableMode(variable->mode()));
2800  BuildThrowIfHole(variable);
2801  }
2802 }
2803 
// Stores the accumulator value into {variable} according to {op},
// dispatching on the variable's allocation. Emits a hole check first when
// {hole_check_mode} requires one, and routes illegal const re-assignments
// to Runtime::kThrowConstAssignError (or silently ignores them, depending
// on throw_on_const_assignment()). The accumulator value is preserved
// across the hole check via a temporary register.
void BytecodeGenerator::BuildVariableAssignment(
    Variable* variable, Token::Value op, HoleCheckMode hole_check_mode,
    LookupHoistingMode lookup_hoisting_mode) {
  VariableMode mode = variable->mode();
  RegisterAllocationScope assignment_register_scope(this);
  BytecodeLabel end_label;
  switch (variable->location()) {
    case VariableLocation::PARAMETER:
    case VariableLocation::LOCAL: {
      Register destination;
      if (VariableLocation::PARAMETER == variable->location()) {
        if (variable->IsReceiver()) {
          destination = builder()->Receiver();
        } else {
          destination = builder()->Parameter(variable->index());
        }
      } else {
        destination = builder()->Local(variable->index());
      }

      if (hole_check_mode == HoleCheckMode::kRequired) {
        // Load destination to check for hole.
        Register value_temp = register_allocator()->NewRegister();
        builder()
            ->StoreAccumulatorInRegister(value_temp)
            .LoadAccumulatorWithRegister(destination);

        BuildHoleCheckForVariableAssignment(variable, op);
        builder()->LoadAccumulatorWithRegister(value_temp);
      }

      // Only store if the binding is mutable or this is its initialization;
      // otherwise it is an illegal const assignment.
      if (mode != VariableMode::kConst || op == Token::INIT) {
        builder()->StoreAccumulatorInRegister(destination);
      } else if (variable->throw_on_const_assignment(language_mode())) {
        builder()->CallRuntime(Runtime::kThrowConstAssignError);
      }
      break;
    }
    case VariableLocation::UNALLOCATED: {
      FeedbackSlot slot = GetCachedStoreGlobalICSlot(language_mode(), variable);
      builder()->StoreGlobal(variable->raw_name(), feedback_index(slot));
      break;
    }
    case VariableLocation::CONTEXT: {
      int depth = execution_context()->ContextChainDepth(variable->scope());
      ContextScope* context = execution_context()->Previous(depth);
      Register context_reg;

      if (context) {
        // The target context is live in a register; address it at depth 0.
        context_reg = context->reg();
        depth = 0;
      } else {
        context_reg = execution_context()->reg();
      }

      if (hole_check_mode == HoleCheckMode::kRequired) {
        // Load destination to check for hole.
        Register value_temp = register_allocator()->NewRegister();
        builder()
            ->StoreAccumulatorInRegister(value_temp)
            .LoadContextSlot(context_reg, variable->index(), depth,
                             BytecodeArrayBuilder::kMutableSlot);

        BuildHoleCheckForVariableAssignment(variable, op);
        builder()->LoadAccumulatorWithRegister(value_temp);
      }

      if (mode != VariableMode::kConst || op == Token::INIT) {
        builder()->StoreContextSlot(context_reg, variable->index(), depth);
      } else if (variable->throw_on_const_assignment(language_mode())) {
        builder()->CallRuntime(Runtime::kThrowConstAssignError);
      }
      break;
    }
    case VariableLocation::LOOKUP: {
      // Dynamically resolved binding: store through the runtime lookup path.
      builder()->StoreLookupSlot(variable->raw_name(), language_mode(),
                                 lookup_hoisting_mode);
      break;
    }
    case VariableLocation::MODULE: {
      DCHECK(IsDeclaredVariableMode(mode));

      if (mode == VariableMode::kConst && op != Token::INIT) {
        builder()->CallRuntime(Runtime::kThrowConstAssignError);
        break;
      }

      // If we don't throw above, we know that we're dealing with an
      // export because imports are const and we do not generate initializing
      // assignments for them.
      DCHECK(variable->IsExport());

      int depth = execution_context()->ContextChainDepth(variable->scope());
      if (hole_check_mode == HoleCheckMode::kRequired) {
        Register value_temp = register_allocator()->NewRegister();
        builder()
            ->StoreAccumulatorInRegister(value_temp)
            .LoadModuleVariable(variable->index(), depth);
        BuildHoleCheckForVariableAssignment(variable, op);
        builder()->LoadAccumulatorWithRegister(value_temp);
      }
      builder()->StoreModuleVariable(variable->index(), depth);
      break;
    }
  }
}
2910 
2911 void BytecodeGenerator::BuildLoadNamedProperty(Property* property,
2912  Register object,
2913  const AstRawString* name) {
2914  if (ShouldOptimizeAsOneShot()) {
2915  builder()->LoadNamedPropertyNoFeedback(object, name);
2916  } else {
2917  FeedbackSlot slot = GetCachedLoadICSlot(property->obj(), name);
2918  builder()->LoadNamedProperty(object, name, feedback_index(slot));
2919  }
2920 }
2921 
2922 void BytecodeGenerator::BuildStoreNamedProperty(Property* property,
2923  Register object,
2924  const AstRawString* name) {
2925  Register value;
2926  if (!execution_result()->IsEffect()) {
2927  value = register_allocator()->NewRegister();
2928  builder()->StoreAccumulatorInRegister(value);
2929  }
2930 
2931  if (ShouldOptimizeAsOneShot()) {
2932  builder()->StoreNamedPropertyNoFeedback(object, name, language_mode());
2933  } else {
2934  FeedbackSlot slot = GetCachedStoreICSlot(property->obj(), name);
2935  builder()->StoreNamedProperty(object, name, feedback_index(slot),
2936  language_mode());
2937  }
2938 
2939  if (!execution_result()->IsEffect()) {
2940  builder()->LoadAccumulatorWithRegister(value);
2941  }
2942 }
2943 
// Emits bytecode for an assignment expression. Proceeds in three phases:
// (1) evaluate the left-hand side's sub-expressions (object/key/super
// receiver) into registers, (2) evaluate the right-hand side into the
// accumulator — for compound assignments this first loads the old LHS value
// and combines it via a binary op — and (3) store the accumulator into the
// target.
void BytecodeGenerator::VisitAssignment(Assignment* expr) {
  DCHECK(expr->target()->IsValidReferenceExpression() ||
         (expr->op() == Token::INIT && expr->target()->IsVariableProxy() &&
          expr->target()->AsVariableProxy()->is_this()));
  Register object, key;
  RegisterList super_property_args;
  const AstRawString* name;

  // Left-hand side can only be a property, a global or a variable slot.
  Property* property = expr->target()->AsProperty();
  LhsKind assign_type = Property::GetAssignType(property);

  // Evaluate LHS expression.
  switch (assign_type) {
    case VARIABLE:
      // Nothing to do to evaluate variable assignment LHS.
      break;
    case NAMED_PROPERTY: {
      object = VisitForRegisterValue(property->obj());
      name = property->key()->AsLiteral()->AsRawPropertyName();
      break;
    }
    case KEYED_PROPERTY: {
      object = VisitForRegisterValue(property->obj());
      key = VisitForRegisterValue(property->key());
      break;
    }
    case NAMED_SUPER_PROPERTY: {
      // Args layout: [this, home_object, key, value]; value is filled in
      // during the store phase below.
      super_property_args = register_allocator()->NewRegisterList(4);
      SuperPropertyReference* super_property =
          property->obj()->AsSuperPropertyReference();
      VisitForRegisterValue(super_property->this_var(), super_property_args[0]);
      VisitForRegisterValue(super_property->home_object(),
                            super_property_args[1]);
      builder()
          ->LoadLiteral(property->key()->AsLiteral()->AsRawPropertyName())
          .StoreAccumulatorInRegister(super_property_args[2]);
      break;
    }
    case KEYED_SUPER_PROPERTY: {
      super_property_args = register_allocator()->NewRegisterList(4);
      SuperPropertyReference* super_property =
          property->obj()->AsSuperPropertyReference();
      VisitForRegisterValue(super_property->this_var(), super_property_args[0]);
      VisitForRegisterValue(super_property->home_object(),
                            super_property_args[1]);
      VisitForRegisterValue(property->key(), super_property_args[2]);
      break;
    }
  }

  // Evaluate the value and potentially handle compound assignments by loading
  // the left-hand side value and performing a binary operation.
  if (expr->IsCompoundAssignment()) {
    switch (assign_type) {
      case VARIABLE: {
        VariableProxy* proxy = expr->target()->AsVariableProxy();
        BuildVariableLoad(proxy->var(), proxy->hole_check_mode());
        break;
      }
      case NAMED_PROPERTY: {
        BuildLoadNamedProperty(property, object, name);
        break;
      }
      case KEYED_PROPERTY: {
        // Key is already in accumulator at this point due to evaluating the
        // LHS above.
        FeedbackSlot slot = feedback_spec()->AddKeyedLoadICSlot();
        builder()->LoadKeyedProperty(object, feedback_index(slot));
        break;
      }
      case NAMED_SUPER_PROPERTY: {
        // Truncate(3): the load only needs [this, home_object, key].
        builder()->CallRuntime(Runtime::kLoadFromSuper,
                               super_property_args.Truncate(3));
        break;
      }
      case KEYED_SUPER_PROPERTY: {
        builder()->CallRuntime(Runtime::kLoadKeyedFromSuper,
                               super_property_args.Truncate(3));
        break;
      }
    }
    BinaryOperation* binop = expr->AsCompoundAssignment()->binary_operation();
    FeedbackSlot slot = feedback_spec()->AddBinaryOpICSlot();
    if (expr->value()->IsSmiLiteral()) {
      // Smi RHS: use the literal-operand form, no extra register needed.
      builder()->BinaryOperationSmiLiteral(
          binop->op(), expr->value()->AsLiteral()->AsSmiLiteral(),
          feedback_index(slot));
    } else {
      Register old_value = register_allocator()->NewRegister();
      builder()->StoreAccumulatorInRegister(old_value);
      VisitForAccumulatorValue(expr->value());
      builder()->BinaryOperation(binop->op(), old_value, feedback_index(slot));
    }
  } else {
    VisitForAccumulatorValue(expr->value());
  }

  // Store the value.
  builder()->SetExpressionPosition(expr);
  switch (assign_type) {
    case VARIABLE: {
      // TODO(oth): The BuildVariableAssignment() call is hard to reason about.
      // Is the value in the accumulator safe? Yes, but scary.
      VariableProxy* proxy = expr->target()->AsVariableProxy();
      BuildVariableAssignment(proxy->var(), expr->op(),
                              proxy->hole_check_mode(),
                              expr->lookup_hoisting_mode());
      break;
    }
    case NAMED_PROPERTY: {
      BuildStoreNamedProperty(property, object, name);
      break;
    }
    case KEYED_PROPERTY: {
      FeedbackSlot slot = feedback_spec()->AddKeyedStoreICSlot(language_mode());
      Register value;
      // The store clobbers the accumulator; preserve it when the assignment's
      // value is consumed by the surrounding expression.
      if (!execution_result()->IsEffect()) {
        value = register_allocator()->NewRegister();
        builder()->StoreAccumulatorInRegister(value);
      }
      builder()->StoreKeyedProperty(object, key, feedback_index(slot),
                                    language_mode());
      if (!execution_result()->IsEffect()) {
        builder()->LoadAccumulatorWithRegister(value);
      }
      break;
    }
    case NAMED_SUPER_PROPERTY: {
      builder()
          ->StoreAccumulatorInRegister(super_property_args[3])
          .CallRuntime(StoreToSuperRuntimeId(), super_property_args);
      break;
    }
    case KEYED_SUPER_PROPERTY: {
      builder()
          ->StoreAccumulatorInRegister(super_property_args[3])
          .CallRuntime(StoreKeyedToSuperRuntimeId(), super_property_args);
      break;
    }
  }
}
3086 
// Compound assignments (e.g. a += b) share all their logic with plain
// assignments; VisitAssignment() detects the compound case itself via
// expr->IsCompoundAssignment().
void BytecodeGenerator::VisitCompoundAssignment(CompoundAssignment* expr) {
  VisitAssignment(expr);
}
3090 
// Suspends the generator to resume at the next suspend_id, with output stored
// in the accumulator. When the generator is resumed, the sent value is loaded
// in the accumulator.
void BytecodeGenerator::BuildSuspendPoint(Expression* suspend_expr) {
  // Suspend ids are allocated sequentially per generator.
  const int suspend_id = suspend_count_++;

  RegisterList registers = register_allocator()->AllLiveRegisters();

  // Save context, registers, and state. This bytecode then returns the value
  // in the accumulator.
  builder()->SetExpressionPosition(suspend_expr);
  builder()->SuspendGenerator(generator_object(), registers, suspend_id);

  // Upon resume, we continue here. The generator's dispatch table jumps to
  // this label for this suspend_id.
  builder()->Bind(generator_jump_table_, suspend_id);

  // Clobbers all registers and sets the accumulator to the
  // [[input_or_debug_pos]] slot of the generator object.
  builder()->ResumeGenerator(generator_object(), registers);
}
3111 
// Emits bytecode for a 'yield' expression: wraps the operand as required for
// the generator kind, suspends, and on resume dispatches on the resume mode
// (next / return / throw) delivered by the resumer.
void BytecodeGenerator::VisitYield(Yield* expr) {
  builder()->SetExpressionPosition(expr);
  VisitForAccumulatorValue(expr->expression());

  // If this is not the first yield (the implicit initial yield does not wrap
  // its value).
  if (suspend_count_ > 0) {
    if (IsAsyncGeneratorFunction(function_kind())) {
      // AsyncGenerator yields (with the exception of the initial yield)
      // delegate work to the AsyncGeneratorYield stub, which Awaits the operand
      // and on success, wraps the value in an IteratorResult.
      RegisterAllocationScope register_scope(this);
      RegisterList args = register_allocator()->NewRegisterList(3);
      builder()
          ->MoveRegister(generator_object(), args[0])  // generator
          .StoreAccumulatorInRegister(args[1])         // value
          .LoadBoolean(catch_prediction() != HandlerTable::ASYNC_AWAIT)
          .StoreAccumulatorInRegister(args[2])  // is_caught
          .CallRuntime(Runtime::kInlineAsyncGeneratorYield, args);
    } else {
      // Generator yields (with the exception of the initial yield) wrap the
      // value into IteratorResult.
      RegisterAllocationScope register_scope(this);
      RegisterList args = register_allocator()->NewRegisterList(2);
      builder()
          ->StoreAccumulatorInRegister(args[0])  // value
          .LoadFalse()
          .StoreAccumulatorInRegister(args[1])  // done
          .CallRuntime(Runtime::kInlineCreateIterResultObject, args);
    }
  }

  BuildSuspendPoint(expr);
  // At this point, the generator has been resumed, with the received value in
  // the accumulator.

  // TODO(caitp): remove once yield* desugaring for async generators is handled
  // in BytecodeGenerator.
  if (expr->on_abrupt_resume() == Yield::kNoControl) {
    DCHECK(IsAsyncGeneratorFunction(function_kind()));
    return;
  }

  Register input = register_allocator()->NewRegister();
  builder()->StoreAccumulatorInRegister(input).CallRuntime(
      Runtime::kInlineGeneratorGetResumeMode, generator_object());

  // Now dispatch on resume mode.
  STATIC_ASSERT(JSGeneratorObject::kNext + 1 == JSGeneratorObject::kReturn);
  BytecodeJumpTable* jump_table =
      builder()->AllocateJumpTable(2, JSGeneratorObject::kNext);

  builder()->SwitchOnSmiNoFeedback(jump_table);

  {
    // Resume with throw (switch fallthrough).
    // TODO(leszeks): Add a debug-only check that the accumulator is
    // JSGeneratorObject::kThrow.
    builder()->SetExpressionPosition(expr);
    builder()->LoadAccumulatorWithRegister(input);
    builder()->Throw();
  }

  {
    // Resume with return.
    builder()->Bind(jump_table, JSGeneratorObject::kReturn);
    builder()->LoadAccumulatorWithRegister(input);
    if (IsAsyncGeneratorFunction(function_kind())) {
      execution_control()->AsyncReturnAccumulator();
    } else {
      execution_control()->ReturnAccumulator();
    }
  }

  {
    // Resume with next.
    builder()->Bind(jump_table, JSGeneratorObject::kNext);
    BuildIncrementBlockCoverageCounterIfEnabled(expr,
                                                SourceRangeKind::kContinuation);
    builder()->LoadAccumulatorWithRegister(input);
  }
}
3193 
3194 // Desugaring of (yield* iterable)
3195 //
3196 // do {
3197 // const kNext = 0;
3198 // const kReturn = 1;
3199 // const kThrow = 2;
3200 //
3201 // let output; // uninitialized
3202 //
3203 // let iteratorRecord = GetIterator(iterable);
3204 // let iterator = iteratorRecord.[[Iterator]];
3205 // let next = iteratorRecord.[[NextMethod]];
3206 // let input = undefined;
3207 // let resumeMode = kNext;
3208 //
3209 // while (true) {
3210 // // From the generator to the iterator:
3211 // // Forward input according to resumeMode and obtain output.
3212 // switch (resumeMode) {
3213 // case kNext:
3214 //       output = next.[[Call]](iterator, « »);
3215 // break;
3216 // case kReturn:
3217 // let iteratorReturn = iterator.return;
3218 // if (IS_NULL_OR_UNDEFINED(iteratorReturn)) return input;
3219 // output = iteratorReturn.[[Call]](iterator, «input»);
3220 // break;
3221 // case kThrow:
3222 // let iteratorThrow = iterator.throw;
3223 // if (IS_NULL_OR_UNDEFINED(iteratorThrow)) {
3224 // let iteratorReturn = iterator.return;
3225 // if (!IS_NULL_OR_UNDEFINED(iteratorReturn)) {
3226 // output = iteratorReturn.[[Call]](iterator, « »);
3227 // if (IS_ASYNC_GENERATOR) output = await output;
3228 // if (!IS_RECEIVER(output)) %ThrowIterResultNotAnObject(output);
3229 // }
3230 // throw MakeTypeError(kThrowMethodMissing);
3231 // }
3232 // output = iteratorThrow.[[Call]](iterator, «input»);
3233 // break;
3234 // }
3235 //
3236 // if (IS_ASYNC_GENERATOR) output = await output;
3237 // if (!IS_RECEIVER(output)) %ThrowIterResultNotAnObject(output);
3238 // if (output.done) break;
3239 //
3240 // // From the generator to its user:
3241 // // Forward output, receive new input, and determine resume mode.
3242 // if (IS_ASYNC_GENERATOR) {
3243 // // AsyncGeneratorYield abstract operation awaits the operand before
3244 // // resolving the promise for the current AsyncGeneratorRequest.
3245 // %_AsyncGeneratorYield(output.value)
3246 // }
3247 // input = Suspend(output);
3248 // resumeMode = %GeneratorGetResumeMode();
3249 // }
3250 //
3251 // if (resumeMode === kReturn) {
3252 // return output.value;
3253 // }
3254 // output.value
3255 // }
3256 void BytecodeGenerator::VisitYieldStar(YieldStar* expr) {
3257  Register output = register_allocator()->NewRegister();
3258  Register resume_mode = register_allocator()->NewRegister();
3259  IteratorType iterator_type = IsAsyncGeneratorFunction(function_kind())
3260  ? IteratorType::kAsync
3261  : IteratorType::kNormal;
3262 
3263  {
3264  RegisterAllocationScope register_scope(this);
3265  RegisterList iterator_and_input = register_allocator()->NewRegisterList(2);
3266  IteratorRecord iterator = BuildGetIteratorRecord(
3267  expr->expression(),
3268  register_allocator()->NewRegister() /* next method */,
3269  iterator_and_input[0], iterator_type);
3270 
3271  Register input = iterator_and_input[1];
3272  builder()->LoadUndefined().StoreAccumulatorInRegister(input);
3273  builder()
3274  ->LoadLiteral(Smi::FromInt(JSGeneratorObject::kNext))
3275  .StoreAccumulatorInRegister(resume_mode);
3276 
3277  {
3278  // This loop builder does not construct counters as the loop is not
3279  // visible to the user, and we therefore neither pass the block coverage
3280  // builder nor the expression.
3281  //
3282  // In addition to the normal suspend for yield*, a yield* in an async
3283  // generator has 2 additional suspends:
3284  // - One for awaiting the iterator result of closing the generator when
3285  // resumed with a "throw" completion, and a throw method is not
3286  // present on the delegated iterator
3287  // - One for awaiting the iterator result yielded by the delegated
3288  // iterator
3289 
3290  LoopBuilder loop(builder(), nullptr, nullptr);
3291  loop.LoopHeader();
3292 
3293  {
3294  BytecodeLabels after_switch(zone());
3295  BytecodeJumpTable* switch_jump_table =
3296  builder()->AllocateJumpTable(2, 1);
3297 
3298  builder()
3299  ->LoadAccumulatorWithRegister(resume_mode)
3300  .SwitchOnSmiNoFeedback(switch_jump_table);
3301 
3302  // Fallthrough to default case.
3303  // TODO(tebbi): Add debug code to check that {resume_mode} really is
3304  // {JSGeneratorObject::kNext} in this case.
3305  STATIC_ASSERT(JSGeneratorObject::kNext == 0);
3306  {
3307  FeedbackSlot slot = feedback_spec()->AddCallICSlot();
3308  builder()->CallProperty(iterator.next(), iterator_and_input,
3309  feedback_index(slot));
3310  builder()->Jump(after_switch.New());
3311  }
3312 
3313  STATIC_ASSERT(JSGeneratorObject::kReturn == 1);
3314  builder()->Bind(switch_jump_table, JSGeneratorObject::kReturn);
3315  {
3316  const AstRawString* return_string =
3317  ast_string_constants()->return_string();
3318  BytecodeLabels no_return_method(zone());
3319 
3320  BuildCallIteratorMethod(iterator.object(), return_string,
3321  iterator_and_input, after_switch.New(),
3322  &no_return_method);
3323  no_return_method.Bind(builder());
3324  builder()->LoadAccumulatorWithRegister(input);
3325  if (iterator_type == IteratorType::kAsync) {
3326  execution_control()->AsyncReturnAccumulator();
3327  } else {
3328  execution_control()->ReturnAccumulator();
3329  }
3330  }
3331 
3332  STATIC_ASSERT(JSGeneratorObject::kThrow == 2);
3333  builder()->Bind(switch_jump_table, JSGeneratorObject::kThrow);
3334  {
3335  const AstRawString* throw_string =
3336  ast_string_constants()->throw_string();
3337  BytecodeLabels no_throw_method(zone());
3338  BuildCallIteratorMethod(iterator.object(), throw_string,
3339  iterator_and_input, after_switch.New(),
3340  &no_throw_method);
3341 
3342  // If there is no "throw" method, perform IteratorClose, and finally
3343  // throw a TypeError.
3344  no_throw_method.Bind(builder());
3345  BuildIteratorClose(iterator, expr);
3346  builder()->CallRuntime(Runtime::kThrowThrowMethodMissing);
3347  }
3348 
3349  after_switch.Bind(builder());
3350  }
3351 
3352  if (iterator_type == IteratorType::kAsync) {
3353  // Await the result of the method invocation.
3354  BuildAwait(expr);
3355  }
3356 
3357  // Check that output is an object.
3358  BytecodeLabel check_if_done;
3359  builder()
3360  ->StoreAccumulatorInRegister(output)
3361  .JumpIfJSReceiver(&check_if_done)
3362  .CallRuntime(Runtime::kThrowIteratorResultNotAnObject, output);
3363 
3364  builder()->Bind(&check_if_done);
3365  // Break once output.done is true.
3366  builder()->LoadNamedProperty(
3367  output, ast_string_constants()->done_string(),
3368  feedback_index(feedback_spec()->AddLoadICSlot()));
3369 
3370  loop.BreakIfTrue(ToBooleanMode::kConvertToBoolean);
3371 
3372  // Suspend the current generator.
3373  if (iterator_type == IteratorType::kNormal) {
3374  builder()->LoadAccumulatorWithRegister(output);
3375  } else {
3376  RegisterAllocationScope register_scope(this);
3377  DCHECK_EQ(iterator_type, IteratorType::kAsync);
3378  // If generatorKind is async, perform AsyncGeneratorYield(output.value),
3379  // which will await `output.value` before resolving the current
3380  // AsyncGeneratorRequest's promise.
3381  builder()->LoadNamedProperty(
3382  output, ast_string_constants()->value_string(),
3383  feedback_index(feedback_spec()->AddLoadICSlot()));
3384 
3385  RegisterList args = register_allocator()->NewRegisterList(3);
3386  builder()
3387  ->MoveRegister(generator_object(), args[0]) // generator
3388  .StoreAccumulatorInRegister(args[1]) // value
3389  .LoadBoolean(catch_prediction() != HandlerTable::ASYNC_AWAIT)
3390  .StoreAccumulatorInRegister(args[2]) // is_caught
3391  .CallRuntime(Runtime::kInlineAsyncGeneratorYield, args);
3392  }
3393 
3394  BuildSuspendPoint(expr);
3395  builder()->StoreAccumulatorInRegister(input);
3396  builder()
3397  ->CallRuntime(Runtime::kInlineGeneratorGetResumeMode,
3398  generator_object())
3399  .StoreAccumulatorInRegister(resume_mode);
3400 
3401  loop.BindContinueTarget();
3402  loop.JumpToHeader(loop_depth_);
3403  }
3404  }
3405 
3406  // Decide if we trigger a return or if the yield* expression should just
3407  // produce a value.
3408  BytecodeLabel completion_is_output_value;
3409  Register output_value = register_allocator()->NewRegister();
3410  builder()
3411  ->LoadNamedProperty(output, ast_string_constants()->value_string(),
3412  feedback_index(feedback_spec()->AddLoadICSlot()))
3413  .StoreAccumulatorInRegister(output_value)
3414  .LoadLiteral(Smi::FromInt(JSGeneratorObject::kReturn))
3415  .CompareReference(resume_mode)
3416  .JumpIfFalse(ToBooleanMode::kAlreadyBoolean, &completion_is_output_value)
3417  .LoadAccumulatorWithRegister(output_value);
3418  if (iterator_type == IteratorType::kAsync) {
3419  execution_control()->AsyncReturnAccumulator();
3420  } else {
3421  execution_control()->ReturnAccumulator();
3422  }
3423 
3424  builder()->Bind(&completion_is_output_value);
3425  BuildIncrementBlockCoverageCounterIfEnabled(expr,
3426  SourceRangeKind::kContinuation);
3427  builder()->LoadAccumulatorWithRegister(output_value);
3428 }
3429 
3430 void BytecodeGenerator::BuildAwait(Expression* await_expr) {
3431  // Rather than HandlerTable::UNCAUGHT, async functions use
3432  // HandlerTable::ASYNC_AWAIT to communicate that top-level exceptions are
3433  // transformed into promise rejections. This is necessary to prevent emitting
3434  // multiple debug events for the same uncaught exception. There is no point
3435  // in the body of an async function where catch prediction is
3436  // HandlerTable::UNCAUGHT.
3437  DCHECK(catch_prediction() != HandlerTable::UNCAUGHT);
3438 
3439  {
3440  // Await(operand) and suspend.
3441  RegisterAllocationScope register_scope(this);
3442 
3443  Runtime::FunctionId await_intrinsic_id;
3444  if (IsAsyncGeneratorFunction(function_kind())) {
3445  await_intrinsic_id = catch_prediction() == HandlerTable::ASYNC_AWAIT
3446  ? Runtime::kInlineAsyncGeneratorAwaitUncaught
3447  : Runtime::kInlineAsyncGeneratorAwaitCaught;
3448  } else {
3449  await_intrinsic_id = catch_prediction() == HandlerTable::ASYNC_AWAIT
3450  ? Runtime::kInlineAsyncFunctionAwaitUncaught
3451  : Runtime::kInlineAsyncFunctionAwaitCaught;
3452  }
3453  RegisterList args = register_allocator()->NewRegisterList(2);
3454  builder()
3455  ->MoveRegister(generator_object(), args[0])
3456  .StoreAccumulatorInRegister(args[1])
3457  .CallRuntime(await_intrinsic_id, args);
3458  }
3459 
3460  BuildSuspendPoint(await_expr);
3461 
3462  Register input = register_allocator()->NewRegister();
3463  Register resume_mode = register_allocator()->NewRegister();
3464 
3465  // Now dispatch on resume mode.
3466  BytecodeLabel resume_next;
3467  builder()
3468  ->StoreAccumulatorInRegister(input)
3469  .CallRuntime(Runtime::kInlineGeneratorGetResumeMode, generator_object())
3470  .StoreAccumulatorInRegister(resume_mode)
3471  .LoadLiteral(Smi::FromInt(JSGeneratorObject::kNext))
3472  .CompareReference(resume_mode)
3473  .JumpIfTrue(ToBooleanMode::kAlreadyBoolean, &resume_next);
3474 
3475  // Resume with "throw" completion (rethrow the received value).
3476  // TODO(leszeks): Add a debug-only check that the accumulator is
3477  // JSGeneratorObject::kThrow.
3478  builder()->LoadAccumulatorWithRegister(input).ReThrow();
3479 
3480  // Resume with next.
3481  builder()->Bind(&resume_next);
3482  builder()->LoadAccumulatorWithRegister(input);
3483 }
3484 
// Visits an Await expression: evaluates the awaited operand into the
// accumulator, suspends via BuildAwait, and bumps the continuation coverage
// counter so block coverage sees the code after the await as reached.
void BytecodeGenerator::VisitAwait(Await* expr) {
  builder()->SetExpressionPosition(expr);
  VisitForAccumulatorValue(expr->expression());
  BuildAwait(expr);
  BuildIncrementBlockCoverageCounterIfEnabled(expr,
                                              SourceRangeKind::kContinuation);
}
3492 
// Visits a Throw expression: evaluates the exception value into the
// accumulator and emits the Throw bytecode. The continuation coverage slot is
// only allocated (not incremented) here, since code after a throw is not
// reached on this path.
void BytecodeGenerator::VisitThrow(Throw* expr) {
  AllocateBlockCoverageSlotIfEnabled(expr, SourceRangeKind::kContinuation);
  VisitForAccumulatorValue(expr->exception());
  builder()->SetExpressionPosition(expr);
  builder()->Throw();
}
3499 
3500 void BytecodeGenerator::VisitPropertyLoad(Register obj, Property* property) {
3501  LhsKind property_kind = Property::GetAssignType(property);
3502  switch (property_kind) {
3503  case VARIABLE:
3504  UNREACHABLE();
3505  case NAMED_PROPERTY: {
3506  builder()->SetExpressionPosition(property);
3507  const AstRawString* name =
3508  property->key()->AsLiteral()->AsRawPropertyName();
3509  BuildLoadNamedProperty(property, obj, name);
3510  break;
3511  }
3512  case KEYED_PROPERTY: {
3513  VisitForAccumulatorValue(property->key());
3514  builder()->SetExpressionPosition(property);
3515  builder()->LoadKeyedProperty(
3516  obj, feedback_index(feedback_spec()->AddKeyedLoadICSlot()));
3517  break;
3518  }
3519  case NAMED_SUPER_PROPERTY:
3520  VisitNamedSuperPropertyLoad(property, Register::invalid_value());
3521  break;
3522  case KEYED_SUPER_PROPERTY:
3523  VisitKeyedSuperPropertyLoad(property, Register::invalid_value());
3524  break;
3525  }
3526 }
3527 
// Like VisitPropertyLoad, but additionally copies the loaded value from the
// accumulator into {destination}.
void BytecodeGenerator::VisitPropertyLoadForRegister(Register obj,
                                                     Property* expr,
                                                     Register destination) {
  ValueResultScope result_scope(this);
  VisitPropertyLoad(obj, expr);
  builder()->StoreAccumulatorInRegister(destination);
}
3535 
3536 void BytecodeGenerator::VisitNamedSuperPropertyLoad(Property* property,
3537  Register opt_receiver_out) {
3538  RegisterAllocationScope register_scope(this);
3539  SuperPropertyReference* super_property =
3540  property->obj()->AsSuperPropertyReference();
3541  RegisterList args = register_allocator()->NewRegisterList(3);
3542  VisitForRegisterValue(super_property->this_var(), args[0]);
3543  VisitForRegisterValue(super_property->home_object(), args[1]);
3544 
3545  builder()->SetExpressionPosition(property);
3546  builder()
3547  ->LoadLiteral(property->key()->AsLiteral()->AsRawPropertyName())
3548  .StoreAccumulatorInRegister(args[2])
3549  .CallRuntime(Runtime::kLoadFromSuper, args);
3550 
3551  if (opt_receiver_out.is_valid()) {
3552  builder()->MoveRegister(args[0], opt_receiver_out);
3553  }
3554 }
3555 
3556 void BytecodeGenerator::VisitKeyedSuperPropertyLoad(Property* property,
3557  Register opt_receiver_out) {
3558  RegisterAllocationScope register_scope(this);
3559  SuperPropertyReference* super_property =
3560  property->obj()->AsSuperPropertyReference();
3561  RegisterList args = register_allocator()->NewRegisterList(3);
3562  VisitForRegisterValue(super_property->this_var(), args[0]);
3563  VisitForRegisterValue(super_property->home_object(), args[1]);
3564  VisitForRegisterValue(property->key(), args[2]);
3565 
3566  builder()->SetExpressionPosition(property);
3567  builder()->CallRuntime(Runtime::kLoadKeyedFromSuper, args);
3568 
3569  if (opt_receiver_out.is_valid()) {
3570  builder()->MoveRegister(args[0], opt_receiver_out);
3571  }
3572 }
3573 
3574 void BytecodeGenerator::VisitProperty(Property* expr) {
3575  LhsKind property_kind = Property::GetAssignType(expr);
3576  if (property_kind != NAMED_SUPER_PROPERTY &&
3577  property_kind != KEYED_SUPER_PROPERTY) {
3578  Register obj = VisitForRegisterValue(expr->obj());
3579  VisitPropertyLoad(obj, expr);
3580  } else {
3581  VisitPropertyLoad(Register::invalid_value(), expr);
3582  }
3583 }
3584 
void BytecodeGenerator::VisitResolvedProperty(ResolvedProperty* expr) {
  // ResolvedProperty nodes only appear as the callee of a Call and are
  // handled inline by VisitCall(); visiting one directly is a bug.
  UNREACHABLE();
}
3589 
3590 void BytecodeGenerator::VisitArguments(const ZonePtrList<Expression>* args,
3591  RegisterList* arg_regs) {
3592  // Visit arguments.
3593  for (int i = 0; i < static_cast<int>(args->length()); i++) {
3594  VisitAndPushIntoRegisterList(args->at(i), arg_regs);
3595  }
3596 }
3597 
3598 void BytecodeGenerator::VisitCall(Call* expr) {
3599  Expression* callee_expr = expr->expression();
3600  Call::CallType call_type = expr->GetCallType();
3601 
3602  if (call_type == Call::SUPER_CALL) {
3603  return VisitCallSuper(expr);
3604  }
3605 
3606  // Grow the args list as we visit receiver / arguments to avoid allocating all
3607  // the registers up-front. Otherwise these registers are unavailable during
3608  // receiver / argument visiting and we can end up with memory leaks due to
3609  // registers keeping objects alive.
3610  Register callee = register_allocator()->NewRegister();
3611  RegisterList args = register_allocator()->NewGrowableRegisterList();
3612 
3613  bool implicit_undefined_receiver = false;
3614  // When a call contains a spread, a Call AST node is only created if there is
3615  // exactly one spread, and it is the last argument.
3616  bool is_spread_call = expr->only_last_arg_is_spread();
3617  bool optimize_as_one_shot = ShouldOptimizeAsOneShot();
3618 
3619  // TODO(petermarshall): We have a lot of call bytecodes that are very similar,
3620  // see if we can reduce the number by adding a separate argument which
3621  // specifies the call type (e.g., property, spread, tailcall, etc.).
3622 
3623  // Prepare the callee and the receiver to the function call. This depends on
3624  // the semantics of the underlying call type.
3625  switch (call_type) {
3626  case Call::NAMED_PROPERTY_CALL:
3627  case Call::KEYED_PROPERTY_CALL: {
3628  Property* property = callee_expr->AsProperty();
3629  VisitAndPushIntoRegisterList(property->obj(), &args);
3630  VisitPropertyLoadForRegister(args.last_register(), property, callee);
3631  break;
3632  }
3633  case Call::RESOLVED_PROPERTY_CALL: {
3634  ResolvedProperty* resolved = callee_expr->AsResolvedProperty();
3635  VisitAndPushIntoRegisterList(resolved->object(), &args);
3636  VisitForAccumulatorValue(resolved->property());
3637  builder()->StoreAccumulatorInRegister(callee);
3638  break;
3639  }
3640  case Call::GLOBAL_CALL: {
3641  // Receiver is undefined for global calls.
3642  if (!is_spread_call && !optimize_as_one_shot) {
3643  implicit_undefined_receiver = true;
3644  } else {
3645  // TODO(leszeks): There's no special bytecode for tail calls or spread
3646  // calls with an undefined receiver, so just push undefined ourselves.
3647  BuildPushUndefinedIntoRegisterList(&args);
3648  }
3649  // Load callee as a global variable.
3650  VariableProxy* proxy = callee_expr->AsVariableProxy();
3651  BuildVariableLoadForAccumulatorValue(proxy->var(),
3652  proxy->hole_check_mode());
3653  builder()->StoreAccumulatorInRegister(callee);
3654  break;
3655  }
3656  case Call::WITH_CALL: {
3657  Register receiver = register_allocator()->GrowRegisterList(&args);
3658  DCHECK(callee_expr->AsVariableProxy()->var()->IsLookupSlot());
3659  {
3660  RegisterAllocationScope inner_register_scope(this);
3661  Register name = register_allocator()->NewRegister();
3662 
3663  // Call %LoadLookupSlotForCall to get the callee and receiver.
3664  RegisterList result_pair = register_allocator()->NewRegisterList(2);
3665  Variable* variable = callee_expr->AsVariableProxy()->var();
3666  builder()
3667  ->LoadLiteral(variable->raw_name())
3668  .StoreAccumulatorInRegister(name)
3669  .CallRuntimeForPair(Runtime::kLoadLookupSlotForCall, name,
3670  result_pair)
3671  .MoveRegister(result_pair[0], callee)
3672  .MoveRegister(result_pair[1], receiver);
3673  }
3674  break;
3675  }
3676  case Call::OTHER_CALL: {
3677  // Receiver is undefined for other calls.
3678  if (!is_spread_call && !optimize_as_one_shot) {
3679  implicit_undefined_receiver = true;
3680  } else {
3681  // TODO(leszeks): There's no special bytecode for tail calls or spread
3682  // calls with an undefined receiver, so just push undefined ourselves.
3683  BuildPushUndefinedIntoRegisterList(&args);
3684  }
3685  VisitForRegisterValue(callee_expr, callee);
3686  break;
3687  }
3688  case Call::NAMED_SUPER_PROPERTY_CALL: {
3689  Register receiver = register_allocator()->GrowRegisterList(&args);
3690  Property* property = callee_expr->AsProperty();
3691  VisitNamedSuperPropertyLoad(property, receiver);
3692  builder()->StoreAccumulatorInRegister(callee);
3693  break;
3694  }
3695  case Call::KEYED_SUPER_PROPERTY_CALL: {
3696  Register receiver = register_allocator()->GrowRegisterList(&args);
3697  Property* property = callee_expr->AsProperty();
3698  VisitKeyedSuperPropertyLoad(property, receiver);
3699  builder()->StoreAccumulatorInRegister(callee);
3700  break;
3701  }
3702  case Call::SUPER_CALL:
3703  UNREACHABLE();
3704  break;
3705  }
3706 
3707  // Evaluate all arguments to the function call and store in sequential args
3708  // registers.
3709  VisitArguments(expr->arguments(), &args);
3710  int reciever_arg_count = implicit_undefined_receiver ? 0 : 1;
3711  CHECK_EQ(reciever_arg_count + expr->arguments()->length(),
3712  args.register_count());
3713 
3714  // Resolve callee for a potential direct eval call. This block will mutate the
3715  // callee value.
3716  if (expr->is_possibly_eval() && expr->arguments()->length() > 0) {
3717  RegisterAllocationScope inner_register_scope(this);
3718  // Set up arguments for ResolvePossiblyDirectEval by copying callee, source
3719  // strings and function closure, and loading language and
3720  // position.
3721  Register first_arg = args[reciever_arg_count];
3722  RegisterList runtime_call_args = register_allocator()->NewRegisterList(6);
3723  builder()
3724  ->MoveRegister(callee, runtime_call_args[0])
3725  .MoveRegister(first_arg, runtime_call_args[1])
3726  .MoveRegister(Register::function_closure(), runtime_call_args[2])
3727  .LoadLiteral(Smi::FromEnum(language_mode()))
3728  .StoreAccumulatorInRegister(runtime_call_args[3])
3729  .LoadLiteral(Smi::FromInt(current_scope()->start_position()))
3730  .StoreAccumulatorInRegister(runtime_call_args[4])
3731  .LoadLiteral(Smi::FromInt(expr->position()))
3732  .StoreAccumulatorInRegister(runtime_call_args[5]);
3733 
3734  // Call ResolvePossiblyDirectEval and modify the callee.
3735  builder()
3736  ->CallRuntime(Runtime::kResolvePossiblyDirectEval, runtime_call_args)
3737  .StoreAccumulatorInRegister(callee);
3738  }
3739 
3740  builder()->SetExpressionPosition(expr);
3741 
3742  if (is_spread_call) {
3743  DCHECK(!implicit_undefined_receiver);
3744  builder()->CallWithSpread(callee, args,
3745  feedback_index(feedback_spec()->AddCallICSlot()));
3746  } else if (optimize_as_one_shot) {
3747  DCHECK(!implicit_undefined_receiver);
3748  builder()->CallNoFeedback(callee, args);
3749  } else if (call_type == Call::NAMED_PROPERTY_CALL ||
3750  call_type == Call::KEYED_PROPERTY_CALL ||
3751  call_type == Call::RESOLVED_PROPERTY_CALL) {
3752  DCHECK(!implicit_undefined_receiver);
3753  builder()->CallProperty(callee, args,
3754  feedback_index(feedback_spec()->AddCallICSlot()));
3755  } else if (implicit_undefined_receiver) {
3756  builder()->CallUndefinedReceiver(
3757  callee, args, feedback_index(feedback_spec()->AddCallICSlot()));
3758  } else {
3759  builder()->CallAnyReceiver(
3760  callee, args, feedback_index(feedback_spec()->AddCallICSlot()));
3761  }
3762 }
3763 
// Visits a super() call: fetches the super constructor, constructs it with
// the evaluated arguments (handling spreads), assigns the result to 'this'
// where required, and runs instance member initializers when needed.
void BytecodeGenerator::VisitCallSuper(Call* expr) {
  RegisterAllocationScope register_scope(this);
  SuperCallReference* super = expr->expression()->AsSuperCallReference();
  const ZonePtrList<Expression>* args = expr->arguments();

  // Locate the first spread argument (== args->length() if there is none).
  int first_spread_index = 0;
  for (; first_spread_index < args->length(); first_spread_index++) {
    if (args->at(first_spread_index)->IsSpread()) break;
  }

  // Prepare the constructor to the super call.
  Register this_function = VisitForRegisterValue(super->this_function_var());
  Register constructor = register_allocator()->NewRegister();
  builder()
      ->LoadAccumulatorWithRegister(this_function)
      .GetSuperConstructor(constructor);

  if (first_spread_index < expr->arguments()->length() - 1) {
    // We rewrite something like
    //    super(1, ...x, 2)
    // to
    //    %reflect_construct(constructor, [1, ...x, 2], new_target)
    // That is, we implement (non-last-arg) spreads in super calls via our
    // mechanism for spreads in array literals.

    // First generate the array containing all arguments.
    BuildCreateArrayLiteral(args, nullptr);

    // Now pass that array to %reflect_construct.
    RegisterList construct_args = register_allocator()->NewRegisterList(3);
    builder()->StoreAccumulatorInRegister(construct_args[1]);
    builder()->MoveRegister(constructor, construct_args[0]);
    VisitForRegisterValue(super->new_target_var(), construct_args[2]);
    builder()->CallJSRuntime(Context::REFLECT_CONSTRUCT_INDEX, construct_args);
  } else {
    RegisterList args_regs = register_allocator()->NewGrowableRegisterList();
    VisitArguments(args, &args_regs);
    // The new target is loaded into the accumulator from the
    // {new.target} variable.
    VisitForAccumulatorValue(super->new_target_var());
    builder()->SetExpressionPosition(expr);

    int feedback_slot_index = feedback_index(feedback_spec()->AddCallICSlot());

    if (first_spread_index == expr->arguments()->length() - 1) {
      // A single trailing spread can use the dedicated spread-construct path.
      builder()->ConstructWithSpread(constructor, args_regs,
                                     feedback_slot_index);
    } else {
      DCHECK_EQ(first_spread_index, expr->arguments()->length());
      // Call construct.
      // TODO(turbofan): For now we do gather feedback on super constructor
      // calls, utilizing the existing machinery to inline the actual call
      // target and the JSCreate for the implicit receiver allocation. This
      // is not an ideal solution for super constructor calls, but it gets
      // the job done for now. In the long run we might want to revisit this
      // and come up with a better way.
      builder()->Construct(constructor, args_regs, feedback_slot_index);
    }
  }

  // Explicit calls to the super constructor using super() perform an
  // implicit binding assignment to the 'this' variable.
  //
  // Default constructors don't have to do the assignment because
  // 'this' isn't accessed in default constructors.
  if (!IsDefaultConstructor(info()->literal()->kind())) {
    BuildVariableAssignment(super->this_var()->var(), Token::INIT,
                            HoleCheckMode::kRequired);
  }

  // The derived constructor has the correct bit set always, so we
  // don't emit code to load and call the initializer if not
  // required.
  //
  // For the arrow function or eval case, we always emit code to load
  // and call the initializer.
  //
  // TODO(gsathya): In the future, we could tag nested arrow functions
  // or eval with the correct bit so that we do the load conditionally
  // if required.
  if (info()->literal()->requires_instance_members_initializer() ||
      !IsDerivedConstructor(info()->literal()->kind())) {
    Register instance = register_allocator()->NewRegister();
    builder()->StoreAccumulatorInRegister(instance);
    BuildInstanceMemberInitialization(this_function, instance);
    builder()->LoadAccumulatorWithRegister(instance);
  }
}
3852 
3853 void BytecodeGenerator::VisitCallNew(CallNew* expr) {
3854  Register constructor = VisitForRegisterValue(expr->expression());
3855  RegisterList args = register_allocator()->NewGrowableRegisterList();
3856  VisitArguments(expr->arguments(), &args);
3857 
3858  // The accumulator holds new target which is the same as the
3859  // constructor for CallNew.
3860  builder()->SetExpressionPosition(expr);
3861  builder()->LoadAccumulatorWithRegister(constructor);
3862 
3863  int feedback_slot_index = feedback_index(feedback_spec()->AddCallICSlot());
3864  if (expr->only_last_arg_is_spread()) {
3865  builder()->ConstructWithSpread(constructor, args, feedback_slot_index);
3866  } else {
3867  builder()->Construct(constructor, args, feedback_slot_index);
3868  }
3869 }
3870 
3871 void BytecodeGenerator::VisitCallRuntime(CallRuntime* expr) {
3872  if (expr->is_jsruntime()) {
3873  RegisterList args = register_allocator()->NewGrowableRegisterList();
3874  VisitArguments(expr->arguments(), &args);
3875  builder()->CallJSRuntime(expr->context_index(), args);
3876  } else {
3877  // Evaluate all arguments to the runtime call.
3878  RegisterList args = register_allocator()->NewGrowableRegisterList();
3879  VisitArguments(expr->arguments(), &args);
3880  Runtime::FunctionId function_id = expr->function()->function_id;
3881  builder()->CallRuntime(function_id, args);
3882  }
3883 }
3884 
// 'void expr': evaluate the operand for its side effects only, then produce
// undefined in the accumulator.
void BytecodeGenerator::VisitVoid(UnaryOperation* expr) {
  VisitForEffect(expr->expression());
  builder()->LoadUndefined();
}
3889 
3890 void BytecodeGenerator::VisitForTypeOfValue(Expression* expr) {
3891  if (expr->IsVariableProxy()) {
3892  // Typeof does not throw a reference error on global variables, hence we
3893  // perform a non-contextual load in case the operand is a variable proxy.
3894  VariableProxy* proxy = expr->AsVariableProxy();
3895  BuildVariableLoadForAccumulatorValue(proxy->var(), proxy->hole_check_mode(),
3896  INSIDE_TYPEOF);
3897  } else {
3898  VisitForAccumulatorValue(expr);
3899  }
3900 }
3901 
// 'typeof expr': load the operand with typeof-specific variable semantics,
// then emit the TypeOf bytecode on the accumulator.
void BytecodeGenerator::VisitTypeOf(UnaryOperation* expr) {
  VisitForTypeOfValue(expr->expression());
  builder()->TypeOf();
}
3906 
// Logical '!': in an effect context only the operand's side effects matter;
// in a test context the negation is realized purely by inverting the branch
// targets; otherwise an explicit LogicalNot is emitted on the accumulator.
void BytecodeGenerator::VisitNot(UnaryOperation* expr) {
  if (execution_result()->IsEffect()) {
    VisitForEffect(expr->expression());
  } else if (execution_result()->IsTest()) {
    // No actual logical negation happening, we just swap the control flow, by
    // swapping the target labels and the fallthrough branch, and visit in the
    // same test result context.
    TestResultScope* test_result = execution_result()->AsTest();
    test_result->InvertControlFlow();
    VisitInSameTestExecutionScope(expr->expression());
  } else {
    TypeHint type_hint = VisitForAccumulatorValue(expr->expression());
    builder()->LogicalNot(ToBooleanModeFromTypeHint(type_hint));
    // Always returns a boolean value.
    execution_result()->SetResultIsBoolean();
  }
}
3924 
3925 void BytecodeGenerator::VisitUnaryOperation(UnaryOperation* expr) {
3926  switch (expr->op()) {
3927  case Token::Value::NOT:
3928  VisitNot(expr);
3929  break;
3930  case Token::Value::TYPEOF:
3931  VisitTypeOf(expr);
3932  break;
3933  case Token::Value::VOID:
3934  VisitVoid(expr);
3935  break;
3936  case Token::Value::DELETE:
3937  VisitDelete(expr);
3938  break;
3939  case Token::Value::ADD:
3940  case Token::Value::SUB:
3941  case Token::Value::BIT_NOT:
3942  VisitForAccumulatorValue(expr->expression());
3943  builder()->SetExpressionPosition(expr);
3944  builder()->UnaryOperation(
3945  expr->op(), feedback_index(feedback_spec()->AddBinaryOpICSlot()));
3946  break;
3947  default:
3948  UNREACHABLE();
3949  }
3950 }
3951 
// 'delete expr': property deletes emit a Delete bytecode; unqualified
// identifier deletes are resolved by variable location (locals/context vars
// yield false, globals and lookup slots go through the runtime); anything
// else evaluates for effect and yields true.
void BytecodeGenerator::VisitDelete(UnaryOperation* expr) {
  if (expr->expression()->IsProperty()) {
    // Delete of an object property is allowed both in sloppy
    // and strict modes.
    Property* property = expr->expression()->AsProperty();
    Register object = VisitForRegisterValue(property->obj());
    VisitForAccumulatorValue(property->key());
    builder()->Delete(object, language_mode());
  } else if (expr->expression()->IsVariableProxy()) {
    // Delete of an unqualified identifier is allowed in sloppy mode but is
    // not allowed in strict mode. Deleting 'this' and 'new.target' is allowed
    // in both modes.
    VariableProxy* proxy = expr->expression()->AsVariableProxy();
    DCHECK(is_sloppy(language_mode()) || proxy->is_this() ||
           proxy->is_new_target());
    if (proxy->is_this() || proxy->is_new_target()) {
      // 'this' and 'new.target' are unresolvable references: delete is true.
      builder()->LoadTrue();
    } else {
      Variable* variable = proxy->var();
      switch (variable->location()) {
        case VariableLocation::PARAMETER:
        case VariableLocation::LOCAL:
        case VariableLocation::CONTEXT: {
          // Deleting local var/let/const, context variables, and arguments
          // does not have any effect.
          builder()->LoadFalse();
          break;
        }
        case VariableLocation::UNALLOCATED:
          // TODO(adamk): Falling through to the runtime results in correct
          // behavior, but does unnecessary context-walking (since scope
          // analysis has already proven that the variable doesn't exist in
          // any non-global scope). Consider adding a DeleteGlobal bytecode
          // that knows how to deal with ScriptContexts as well as global
          // object properties.
          // Deliberate fallthrough into the LOOKUP case.
        case VariableLocation::LOOKUP: {
          Register name_reg = register_allocator()->NewRegister();
          builder()
              ->LoadLiteral(variable->raw_name())
              .StoreAccumulatorInRegister(name_reg)
              .CallRuntime(Runtime::kDeleteLookupSlot, name_reg);
          break;
        }
        default:
          UNREACHABLE();
      }
    }
  } else {
    // Delete of an unresolvable reference returns true.
    VisitForEffect(expr->expression());
    builder()->LoadTrue();
  }
}
4005 
// Visits ++/-- (prefix and postfix) on a variable or property reference:
// loads the old value, converts/saves it for postfix results, applies the
// +1/-1 unary operation, and stores the result back to the reference.
void BytecodeGenerator::VisitCountOperation(CountOperation* expr) {
  DCHECK(expr->expression()->IsValidReferenceExpression());

  // Left-hand side can only be a property, a global or a variable slot.
  Property* property = expr->expression()->AsProperty();
  LhsKind assign_type = Property::GetAssignType(property);

  // The old value only needs preserving when the postfix result is observed.
  bool is_postfix = expr->is_postfix() && !execution_result()->IsEffect();

  // Evaluate LHS expression and get old value.
  Register object, key, old_value;
  RegisterList super_property_args;
  const AstRawString* name;
  switch (assign_type) {
    case VARIABLE: {
      VariableProxy* proxy = expr->expression()->AsVariableProxy();
      BuildVariableLoadForAccumulatorValue(proxy->var(),
                                           proxy->hole_check_mode());
      break;
    }
    case NAMED_PROPERTY: {
      object = VisitForRegisterValue(property->obj());
      name = property->key()->AsLiteral()->AsRawPropertyName();
      builder()->LoadNamedProperty(
          object, name,
          feedback_index(GetCachedLoadICSlot(property->obj(), name)));
      break;
    }
    case KEYED_PROPERTY: {
      object = VisitForRegisterValue(property->obj());
      // Use visit for accumulator here since we need the key in the accumulator
      // for the LoadKeyedProperty.
      key = register_allocator()->NewRegister();
      VisitForAccumulatorValue(property->key());
      builder()->StoreAccumulatorInRegister(key).LoadKeyedProperty(
          object, feedback_index(feedback_spec()->AddKeyedLoadICSlot()));
      break;
    }
    case NAMED_SUPER_PROPERTY: {
      // First 3 registers hold the load arguments; the 4th receives the new
      // value for the store below.
      super_property_args = register_allocator()->NewRegisterList(4);
      RegisterList load_super_args = super_property_args.Truncate(3);
      SuperPropertyReference* super_property =
          property->obj()->AsSuperPropertyReference();
      VisitForRegisterValue(super_property->this_var(), load_super_args[0]);
      VisitForRegisterValue(super_property->home_object(), load_super_args[1]);
      builder()
          ->LoadLiteral(property->key()->AsLiteral()->AsRawPropertyName())
          .StoreAccumulatorInRegister(load_super_args[2])
          .CallRuntime(Runtime::kLoadFromSuper, load_super_args);
      break;
    }
    case KEYED_SUPER_PROPERTY: {
      // Same layout as the named-super case, with the key in the 3rd slot.
      super_property_args = register_allocator()->NewRegisterList(4);
      RegisterList load_super_args = super_property_args.Truncate(3);
      SuperPropertyReference* super_property =
          property->obj()->AsSuperPropertyReference();
      VisitForRegisterValue(super_property->this_var(), load_super_args[0]);
      VisitForRegisterValue(super_property->home_object(), load_super_args[1]);
      VisitForRegisterValue(property->key(), load_super_args[2]);
      builder()->CallRuntime(Runtime::kLoadKeyedFromSuper, load_super_args);
      break;
    }
  }

  // Save result for postfix expressions.
  FeedbackSlot count_slot = feedback_spec()->AddBinaryOpICSlot();
  if (is_postfix) {
    old_value = register_allocator()->NewRegister();
    // Convert old value into a number before saving it.
    // TODO(ignition): Think about adding proper PostInc/PostDec bytecodes
    // instead of this ToNumeric + Inc/Dec dance.
    builder()
        ->ToNumeric(feedback_index(count_slot))
        .StoreAccumulatorInRegister(old_value);
  }

  // Perform +1/-1 operation.
  builder()->UnaryOperation(expr->op(), feedback_index(count_slot));

  // Store the value.
  builder()->SetExpressionPosition(expr);
  switch (assign_type) {
    case VARIABLE: {
      VariableProxy* proxy = expr->expression()->AsVariableProxy();
      BuildVariableAssignment(proxy->var(), expr->op(),
                              proxy->hole_check_mode());
      break;
    }
    case NAMED_PROPERTY: {
      FeedbackSlot slot = GetCachedStoreICSlot(property->obj(), name);
      // Preserve the incremented value across the store, which may clobber
      // the accumulator, when the expression result is observed.
      Register value;
      if (!execution_result()->IsEffect()) {
        value = register_allocator()->NewRegister();
        builder()->StoreAccumulatorInRegister(value);
      }
      builder()->StoreNamedProperty(object, name, feedback_index(slot),
                                    language_mode());
      if (!execution_result()->IsEffect()) {
        builder()->LoadAccumulatorWithRegister(value);
      }
      break;
    }
    case KEYED_PROPERTY: {
      FeedbackSlot slot = feedback_spec()->AddKeyedStoreICSlot(language_mode());
      // Same accumulator-preservation dance as the named-property store.
      Register value;
      if (!execution_result()->IsEffect()) {
        value = register_allocator()->NewRegister();
        builder()->StoreAccumulatorInRegister(value);
      }
      builder()->StoreKeyedProperty(object, key, feedback_index(slot),
                                    language_mode());
      if (!execution_result()->IsEffect()) {
        builder()->LoadAccumulatorWithRegister(value);
      }
      break;
    }
    case NAMED_SUPER_PROPERTY: {
      builder()
          ->StoreAccumulatorInRegister(super_property_args[3])
          .CallRuntime(StoreToSuperRuntimeId(), super_property_args);
      break;
    }
    case KEYED_SUPER_PROPERTY: {
      builder()
          ->StoreAccumulatorInRegister(super_property_args[3])
          .CallRuntime(StoreKeyedToSuperRuntimeId(), super_property_args);
      break;
    }
  }

  // Restore old value for postfix expressions.
  if (is_postfix) {
    builder()->LoadAccumulatorWithRegister(old_value);
  }
}
4141 
4142 void BytecodeGenerator::VisitBinaryOperation(BinaryOperation* binop) {
4143  switch (binop->op()) {
4144  case Token::COMMA:
4145  VisitCommaExpression(binop);
4146  break;
4147  case Token::OR:
4148  VisitLogicalOrExpression(binop);
4149  break;
4150  case Token::AND:
4151  VisitLogicalAndExpression(binop);
4152  break;
4153  default:
4154  VisitArithmeticExpression(binop);
4155  break;
4156  }
4157 }
4158 
4159 void BytecodeGenerator::VisitNaryOperation(NaryOperation* expr) {
4160  switch (expr->op()) {
4161  case Token::COMMA:
4162  VisitNaryCommaExpression(expr);
4163  break;
4164  case Token::OR:
4165  VisitNaryLogicalOrExpression(expr);
4166  break;
4167  case Token::AND:
4168  VisitNaryLogicalAndExpression(expr);
4169  break;
4170  default:
4171  VisitNaryArithmeticExpression(expr);
4172  break;
4173  }
4174 }
4175 
// Compares the accumulator against null or undefined (selected by |nil|).
// In a test context this emits conditional jumps straight to the enclosing
// test's then/else labels; otherwise it emits a value-producing CompareNil.
void BytecodeGenerator::BuildLiteralCompareNil(
    Token::Value op, BytecodeArrayBuilder::NilValue nil) {
  if (execution_result()->IsTest()) {
    TestResultScope* test_result = execution_result()->AsTest();
    switch (test_result->fallthrough()) {
      case TestFallthrough::kThen:
        // Execution falls through into the then-branch, so only the
        // not-nil case needs an explicit jump.
        builder()->JumpIfNotNil(test_result->NewElseLabel(), op, nil);
        break;
      case TestFallthrough::kElse:
        builder()->JumpIfNil(test_result->NewThenLabel(), op, nil);
        break;
      case TestFallthrough::kNone:
        // Neither branch falls through: jump explicitly for both outcomes.
        builder()
            ->JumpIfNil(test_result->NewThenLabel(), op, nil)
            .Jump(test_result->NewElseLabel());
    }
    test_result->SetResultConsumedByTest();
  } else {
    builder()->CompareNil(op, nil);
  }
}
4197 
// Emits bytecode for a comparison expression. Literal typeof, undefined and
// null comparisons use specialized fast paths; everything else uses the
// generic CompareOperation bytecode, with feedback where applicable.
void BytecodeGenerator::VisitCompareOperation(CompareOperation* expr) {
  Expression* sub_expr;
  Literal* literal;
  if (expr->IsLiteralCompareTypeof(&sub_expr, &literal)) {
    // Emit a fast literal comparison for expressions of the form:
    // typeof(x) === 'string'.
    VisitForTypeOfValue(sub_expr);
    builder()->SetExpressionPosition(expr);
    TestTypeOfFlags::LiteralFlag literal_flag =
        TestTypeOfFlags::GetFlagForLiteral(ast_string_constants(), literal);
    if (literal_flag == TestTypeOfFlags::LiteralFlag::kOther) {
      // The literal is not a recognized typeof result string, so the
      // comparison is statically false.
      builder()->LoadFalse();
    } else {
      builder()->CompareTypeOf(literal_flag);
    }
  } else if (expr->IsLiteralCompareUndefined(&sub_expr)) {
    VisitForAccumulatorValue(sub_expr);
    builder()->SetExpressionPosition(expr);
    BuildLiteralCompareNil(expr->op(), BytecodeArrayBuilder::kUndefinedValue);
  } else if (expr->IsLiteralCompareNull(&sub_expr)) {
    VisitForAccumulatorValue(sub_expr);
    builder()->SetExpressionPosition(expr);
    BuildLiteralCompareNil(expr->op(), BytecodeArrayBuilder::kNullValue);
  } else {
    Register lhs = VisitForRegisterValue(expr->left());
    VisitForAccumulatorValue(expr->right());
    builder()->SetExpressionPosition(expr);
    if (expr->op() == Token::IN) {
      // 'in' takes no feedback slot.
      builder()->CompareOperation(expr->op(), lhs);
    } else if (expr->op() == Token::INSTANCEOF) {
      FeedbackSlot slot = feedback_spec()->AddInstanceOfSlot();
      builder()->CompareOperation(expr->op(), lhs, feedback_index(slot));
    } else {
      FeedbackSlot slot = feedback_spec()->AddCompareICSlot();
      builder()->CompareOperation(expr->op(), lhs, feedback_index(slot));
    }
  }
  // Always returns a boolean value.
  execution_result()->SetResultIsBoolean();
}
4238 
// Emits bytecode for a binary arithmetic expression, using the Smi-literal
// specialization when one operand is a small-integer literal. Tracks string
// type hints so ADD results can be marked as strings.
void BytecodeGenerator::VisitArithmeticExpression(BinaryOperation* expr) {
  FeedbackSlot slot = feedback_spec()->AddBinaryOpICSlot();
  Expression* subexpr;
  Smi literal;
  if (expr->IsSmiLiteralOperation(&subexpr, &literal)) {
    TypeHint type_hint = VisitForAccumulatorValue(subexpr);
    builder()->SetExpressionPosition(expr);
    builder()->BinaryOperationSmiLiteral(expr->op(), literal,
                                         feedback_index(slot));
    // String + anything produces a string.
    if (expr->op() == Token::ADD && type_hint == TypeHint::kString) {
      execution_result()->SetResultIsString();
    }
  } else {
    TypeHint lhs_type = VisitForAccumulatorValue(expr->left());
    Register lhs = register_allocator()->NewRegister();
    builder()->StoreAccumulatorInRegister(lhs);
    TypeHint rhs_type = VisitForAccumulatorValue(expr->right());
    if (expr->op() == Token::ADD &&
        (lhs_type == TypeHint::kString || rhs_type == TypeHint::kString)) {
      execution_result()->SetResultIsString();
    }

    builder()->SetExpressionPosition(expr);
    builder()->BinaryOperation(expr->op(), lhs, feedback_index(slot));
  }
}
4265 
// Emits bytecode for an n-ary arithmetic expression, folding operands into
// the accumulator left-to-right. Each operation gets its own feedback slot.
void BytecodeGenerator::VisitNaryArithmeticExpression(NaryOperation* expr) {
  // TODO(leszeks): Add support for lhs smi in commutative ops.
  TypeHint type_hint = VisitForAccumulatorValue(expr->first());

  for (size_t i = 0; i < expr->subsequent_length(); ++i) {
    RegisterAllocationScope register_scope(this);
    if (expr->subsequent(i)->IsSmiLiteral()) {
      // Smi-literal right operand: no register needed for it.
      builder()->SetExpressionPosition(expr->subsequent_op_position(i));
      builder()->BinaryOperationSmiLiteral(
          expr->op(), expr->subsequent(i)->AsLiteral()->AsSmiLiteral(),
          feedback_index(feedback_spec()->AddBinaryOpICSlot()));
    } else {
      // Spill the running result to a register before evaluating the rhs.
      Register lhs = register_allocator()->NewRegister();
      builder()->StoreAccumulatorInRegister(lhs);
      TypeHint rhs_hint = VisitForAccumulatorValue(expr->subsequent(i));
      if (rhs_hint == TypeHint::kString) type_hint = TypeHint::kString;
      builder()->SetExpressionPosition(expr->subsequent_op_position(i));
      builder()->BinaryOperation(
          expr->op(), lhs,
          feedback_index(feedback_spec()->AddBinaryOpICSlot()));
    }
  }

  if (type_hint == TypeHint::kString && expr->op() == Token::ADD) {
    // If any operand of an ADD is a String, a String is produced.
    execution_result()->SetResultIsString();
  }
}
4294 
4295 // Note: the actual spreading is performed by the surrounding expression's
4296 // visitor.
4297 void BytecodeGenerator::VisitSpread(Spread* expr) { Visit(expr->expression()); }
4298 
// EmptyParentheses nodes are never expected to survive to bytecode
// generation; reaching this visitor indicates a front-end bug.
void BytecodeGenerator::VisitEmptyParentheses(EmptyParentheses* expr) {
  UNREACHABLE();
}
4302 
4303 void BytecodeGenerator::VisitImportCallExpression(ImportCallExpression* expr) {
4304  RegisterList args = register_allocator()->NewRegisterList(2);
4305  VisitForRegisterValue(expr->argument(), args[1]);
4306  builder()
4307  ->MoveRegister(Register::function_closure(), args[0])
4308  .CallRuntime(Runtime::kDynamicImportCall, args);
4309 }
4310 
// Implements GetIterator(iterable, hint): loads the @@iterator (or, for
// async, @@asyncIterator) method from the iterable, calls it, and throws if
// the result is not an object. For async iteration without an
// @@asyncIterator method, falls back to wrapping the sync iterator via
// CreateAsyncFromSyncIterator. The resulting iterator is left in the
// accumulator.
void BytecodeGenerator::BuildGetIterator(Expression* iterable,
                                         IteratorType hint) {
  RegisterList args = register_allocator()->NewRegisterList(1);
  Register method = register_allocator()->NewRegister();
  Register obj = args[0];

  VisitForAccumulatorValue(iterable);

  if (hint == IteratorType::kAsync) {
    // Set method to GetMethod(obj, @@asyncIterator)
    builder()->StoreAccumulatorInRegister(obj).LoadAsyncIteratorProperty(
        obj, feedback_index(feedback_spec()->AddLoadICSlot()));

    BytecodeLabel async_iterator_undefined, async_iterator_null, done;
    // TODO(ignition): Add a single opcode for JumpIfNullOrUndefined
    builder()->JumpIfUndefined(&async_iterator_undefined);
    builder()->JumpIfNull(&async_iterator_null);

    // Let iterator be Call(method, obj)
    builder()->StoreAccumulatorInRegister(method).CallProperty(
        method, args, feedback_index(feedback_spec()->AddCallICSlot()));

    // If Type(iterator) is not Object, throw a TypeError exception.
    builder()->JumpIfJSReceiver(&done);
    builder()->CallRuntime(Runtime::kThrowSymbolAsyncIteratorInvalid);

    builder()->Bind(&async_iterator_undefined);
    builder()->Bind(&async_iterator_null);
    // If method is undefined,
    //   Let syncMethod be GetMethod(obj, @@iterator)
    builder()
        ->LoadIteratorProperty(obj,
                               feedback_index(feedback_spec()->AddLoadICSlot()))
        .StoreAccumulatorInRegister(method);

    //   Let syncIterator be Call(syncMethod, obj)
    builder()->CallProperty(method, args,
                            feedback_index(feedback_spec()->AddCallICSlot()));

    // Return CreateAsyncFromSyncIterator(syncIterator)
    // alias `method` register as it's no longer used
    Register sync_iter = method;
    builder()->StoreAccumulatorInRegister(sync_iter).CallRuntime(
        Runtime::kInlineCreateAsyncFromSyncIterator, sync_iter);

    builder()->Bind(&done);
  } else {
    // Let method be GetMethod(obj, @@iterator).
    builder()
        ->StoreAccumulatorInRegister(obj)
        .LoadIteratorProperty(obj,
                              feedback_index(feedback_spec()->AddLoadICSlot()))
        .StoreAccumulatorInRegister(method);

    // Let iterator be Call(method, obj).
    builder()->CallProperty(method, args,
                            feedback_index(feedback_spec()->AddCallICSlot()));

    // If Type(iterator) is not Object, throw a TypeError exception.
    BytecodeLabel no_type_error;
    builder()->JumpIfJSReceiver(&no_type_error);
    builder()->CallRuntime(Runtime::kThrowSymbolIteratorInvalid);
    builder()->Bind(&no_type_error);
  }
}
4376 
// Returns an IteratorRecord which is valid for the lifetime of the current
// register_allocation_scope. |object| receives the iterator itself and
// |next| receives its "next" method.
BytecodeGenerator::IteratorRecord BytecodeGenerator::BuildGetIteratorRecord(
    Expression* iterable, Register next, Register object, IteratorType hint) {
  DCHECK(next.is_valid() && object.is_valid());
  BuildGetIterator(iterable, hint);

  // The iterator is in the accumulator: stash it, then load and stash its
  // "next" property.
  builder()
      ->StoreAccumulatorInRegister(object)
      .LoadNamedProperty(object, ast_string_constants()->next_string(),
                         feedback_index(feedback_spec()->AddLoadICSlot()))
      .StoreAccumulatorInRegister(next);
  return IteratorRecord(object, next, hint);
}
4391 
4392 BytecodeGenerator::IteratorRecord BytecodeGenerator::BuildGetIteratorRecord(
4393  Expression* iterable, IteratorType hint) {
4394  Register next = register_allocator()->NewRegister();
4395  Register object = register_allocator()->NewRegister();
4396  return BuildGetIteratorRecord(iterable, next, object, hint);
4397 }
4398 
// Calls iterator.next() and stores the result object in |next_result|,
// throwing IteratorResultNotAnObject when the result is not a JSReceiver.
void BytecodeGenerator::BuildIteratorNext(const IteratorRecord& iterator,
                                          Register next_result) {
  DCHECK(next_result.is_valid());
  builder()->CallProperty(iterator.next(), RegisterList(iterator.object()),
                          feedback_index(feedback_spec()->AddCallICSlot()));

  // TODO(caitp): support async IteratorNext here.

  BytecodeLabel is_object;
  builder()
      ->StoreAccumulatorInRegister(next_result)
      .JumpIfJSReceiver(&is_object)
      .CallRuntime(Runtime::kThrowIteratorResultNotAnObject, next_result)
      .Bind(&is_object);
}
4414 
// Loads |method_name| from |iterator| and, if it is neither undefined nor
// null, calls it with |receiver_and_args| and jumps to |if_called|.
// Otherwise control continues at |if_notcalled|.
void BytecodeGenerator::BuildCallIteratorMethod(Register iterator,
                                                const AstRawString* method_name,
                                                RegisterList receiver_and_args,
                                                BytecodeLabel* if_called,
                                                BytecodeLabels* if_notcalled) {
  RegisterAllocationScope register_scope(this);

  Register method = register_allocator()->NewRegister();
  FeedbackSlot slot = feedback_spec()->AddLoadICSlot();
  builder()
      ->LoadNamedProperty(iterator, method_name, feedback_index(slot))
      .JumpIfUndefined(if_notcalled->New())
      .JumpIfNull(if_notcalled->New())
      .StoreAccumulatorInRegister(method)
      .CallProperty(method, receiver_and_args,
                    feedback_index(feedback_spec()->AddCallICSlot()))
      .Jump(if_called);
}
4433 
// Implements IteratorClose: calls iterator.return() if present, awaits the
// result for async iterators, and throws if the call result is not an
// object. Does nothing when the iterator has no return method.
void BytecodeGenerator::BuildIteratorClose(const IteratorRecord& iterator,
                                           Expression* expr) {
  RegisterAllocationScope register_scope(this);
  BytecodeLabels done(zone());
  BytecodeLabel if_called;
  RegisterList args = RegisterList(iterator.object());
  BuildCallIteratorMethod(iterator.object(),
                          ast_string_constants()->return_string(), args,
                          &if_called, &done);
  builder()->Bind(&if_called);

  if (iterator.type() == IteratorType::kAsync) {
    // |expr| is only needed (for its position) when awaiting.
    DCHECK_NOT_NULL(expr);
    BuildAwait(expr);
  }

  builder()->JumpIfJSReceiver(done.New());
  {
    RegisterAllocationScope register_scope(this);
    Register return_result = register_allocator()->NewRegister();
    builder()
        ->StoreAccumulatorInRegister(return_result)
        .CallRuntime(Runtime::kThrowIteratorResultNotAnObject, return_result);
  }

  done.Bind(builder());
}
4461 
4462 void BytecodeGenerator::VisitGetIterator(GetIterator* expr) {
4463  builder()->SetExpressionPosition(expr);
4464  BuildGetIterator(expr->iterable(), expr->hint());
4465 }
4466 
4467 void BytecodeGenerator::VisitGetTemplateObject(GetTemplateObject* expr) {
4468  builder()->SetExpressionPosition(expr);
4469  size_t entry = builder()->AllocateDeferredConstantPoolEntry();
4470  template_objects_.push_back(std::make_pair(expr, entry));
4471  FeedbackSlot literal_slot = feedback_spec()->AddLiteralSlot();
4472  builder()->GetTemplateObject(entry, feedback_index(literal_slot));
4473 }
4474 
// Emits string concatenation for a template literal with at least one
// substitution, interleaving literal parts with stringified substitution
// values. The running result lives in the accumulator between iterations.
void BytecodeGenerator::VisitTemplateLiteral(TemplateLiteral* expr) {
  const ZonePtrList<const AstRawString>& parts = *expr->string_parts();
  const ZonePtrList<Expression>& substitutions = *expr->substitutions();
  // Template strings with no substitutions are turned into StringLiterals.
  DCHECK_GT(substitutions.length(), 0);
  DCHECK_EQ(parts.length(), substitutions.length() + 1);

  // Generate string concatenation
  // TODO(caitp): Don't generate feedback slot if it's not used --- introduce
  // a simple, concise, reusable mechanism to lazily create reusable slots.
  FeedbackSlot slot = feedback_spec()->AddBinaryOpICSlot();
  Register last_part = register_allocator()->NewRegister();
  // Tracks whether |last_part| currently holds a partial result that the
  // next concatenation should use as its left operand.
  bool last_part_valid = false;

  builder()->SetExpressionPosition(expr);
  for (int i = 0; i < substitutions.length(); ++i) {
    if (i != 0) {
      // Save the previous iteration's result (in the accumulator) so it can
      // serve as the left operand below.
      builder()->StoreAccumulatorInRegister(last_part);
      last_part_valid = true;
    }

    if (!parts[i]->IsEmpty()) {
      builder()->LoadLiteral(parts[i]);
      if (last_part_valid) {
        builder()->BinaryOperation(Token::ADD, last_part, feedback_index(slot));
      }
      builder()->StoreAccumulatorInRegister(last_part);
      last_part_valid = true;
    }

    TypeHint type_hint = VisitForAccumulatorValue(substitutions[i]);
    if (type_hint != TypeHint::kString) {
      // Substitution values are stringified before concatenation.
      builder()->ToString();
    }
    if (last_part_valid) {
      builder()->BinaryOperation(Token::ADD, last_part, feedback_index(slot));
    }
    last_part_valid = false;
  }

  if (!parts.last()->IsEmpty()) {
    // Append the trailing literal part to the accumulated result.
    builder()->StoreAccumulatorInRegister(last_part);
    builder()->LoadLiteral(parts.last());
    builder()->BinaryOperation(Token::ADD, last_part, feedback_index(slot));
  }
}
4521 
4522 void BytecodeGenerator::VisitThisFunction(ThisFunction* expr) {
4523  builder()->LoadAccumulatorWithRegister(Register::function_closure());
4524 }
4525 
// Super call references never reach the generic expression visitor; the
// call visitor consumes them directly.
void BytecodeGenerator::VisitSuperCallReference(SuperCallReference* expr) {
  // Handled by VisitCall().
  UNREACHABLE();
}
4530 
// A bare super property reference visited as a plain expression is invalid:
// emit the runtime call that throws the unsupported-super error.
void BytecodeGenerator::VisitSuperPropertyReference(
    SuperPropertyReference* expr) {
  builder()->CallRuntime(Runtime::kThrowUnsupportedSuperError);
}
4535 
4536 void BytecodeGenerator::VisitCommaExpression(BinaryOperation* binop) {
4537  VisitForEffect(binop->left());
4538  Visit(binop->right());
4539 }
4540 
4541 void BytecodeGenerator::VisitNaryCommaExpression(NaryOperation* expr) {
4542  DCHECK_GT(expr->subsequent_length(), 0);
4543 
4544  VisitForEffect(expr->first());
4545  for (size_t i = 0; i < expr->subsequent_length() - 1; ++i) {
4546  VisitForEffect(expr->subsequent(i));
4547  }
4548  Visit(expr->subsequent(expr->subsequent_length() - 1));
4549 }
4550 
// Emits the test for one non-final operand of a logical || or && in a test
// context. Short-circuiting jumps to the enclosing then/else labels; the
// non-short-circuit case falls through (via |test_next|) to the next
// operand, bumping the coverage counter for that path.
void BytecodeGenerator::VisitLogicalTestSubExpression(
    Token::Value token, Expression* expr, BytecodeLabels* then_labels,
    BytecodeLabels* else_labels, int coverage_slot) {
  DCHECK(token == Token::OR || token == Token::AND);

  BytecodeLabels test_next(zone());
  if (token == Token::OR) {
    // || short-circuits to the then-branch on a truthy value.
    VisitForTest(expr, then_labels, &test_next, TestFallthrough::kElse);
  } else {
    DCHECK_EQ(Token::AND, token);
    // && short-circuits to the else-branch on a falsy value.
    VisitForTest(expr, &test_next, else_labels, TestFallthrough::kThen);
  }
  test_next.Bind(builder());

  BuildIncrementBlockCoverageCounterIfEnabled(coverage_slot);
}
4567 
// Emits a binary logical || or && in a test context: the left operand is a
// short-circuiting sub-test, the right operand inherits the parent test's
// labels and fallthrough.
void BytecodeGenerator::VisitLogicalTest(Token::Value token, Expression* left,
                                         Expression* right,
                                         int right_coverage_slot) {
  DCHECK(token == Token::OR || token == Token::AND);
  TestResultScope* test_result = execution_result()->AsTest();
  BytecodeLabels* then_labels = test_result->then_labels();
  BytecodeLabels* else_labels = test_result->else_labels();
  TestFallthrough fallthrough = test_result->fallthrough();

  VisitLogicalTestSubExpression(token, left, then_labels, else_labels,
                                right_coverage_slot);
  // The last test has the same then, else and fallthrough as the parent test.
  VisitForTest(right, then_labels, else_labels, fallthrough);
}
4582 
// N-ary analogue of VisitLogicalTest: every operand but the last is a
// short-circuiting sub-test; the last inherits the parent test's labels and
// fallthrough.
void BytecodeGenerator::VisitNaryLogicalTest(
    Token::Value token, NaryOperation* expr,
    const NaryCodeCoverageSlots* coverage_slots) {
  DCHECK(token == Token::OR || token == Token::AND);
  DCHECK_GT(expr->subsequent_length(), 0);

  TestResultScope* test_result = execution_result()->AsTest();
  BytecodeLabels* then_labels = test_result->then_labels();
  BytecodeLabels* else_labels = test_result->else_labels();
  TestFallthrough fallthrough = test_result->fallthrough();

  VisitLogicalTestSubExpression(token, expr->first(), then_labels, else_labels,
                                coverage_slots->GetSlotFor(0));
  for (size_t i = 0; i < expr->subsequent_length() - 1; ++i) {
    VisitLogicalTestSubExpression(token, expr->subsequent(i), then_labels,
                                  else_labels,
                                  coverage_slots->GetSlotFor(i + 1));
  }
  // The last test has the same then, else and fallthrough as the parent test.
  VisitForTest(expr->subsequent(expr->subsequent_length() - 1), then_labels,
               else_labels, fallthrough);
}
4605 
// Visits one non-final operand of a value-producing ||. Returns true if the
// operand is statically truthy, in which case the caller should stop
// emitting further operands. Statically falsy operands are skipped;
// otherwise a jump to |end_labels| is emitted for the truthy case.
bool BytecodeGenerator::VisitLogicalOrSubExpression(Expression* expr,
                                                    BytecodeLabels* end_labels,
                                                    int coverage_slot) {
  if (expr->ToBooleanIsTrue()) {
    // Statically truthy: its value is the result of the whole ||.
    VisitForAccumulatorValue(expr);
    end_labels->Bind(builder());
    return true;
  } else if (!expr->ToBooleanIsFalse()) {
    TypeHint type_hint = VisitForAccumulatorValue(expr);
    builder()->JumpIfTrue(ToBooleanModeFromTypeHint(type_hint),
                          end_labels->New());
  }

  BuildIncrementBlockCoverageCounterIfEnabled(coverage_slot);

  return false;
}
4623 
// Visits one non-final operand of a value-producing &&. Returns true if the
// operand is statically falsy, in which case the caller should stop
// emitting further operands. Statically truthy operands are skipped;
// otherwise a jump to |end_labels| is emitted for the falsy case.
bool BytecodeGenerator::VisitLogicalAndSubExpression(Expression* expr,
                                                     BytecodeLabels* end_labels,
                                                     int coverage_slot) {
  if (expr->ToBooleanIsFalse()) {
    // Statically falsy: its value is the result of the whole &&.
    VisitForAccumulatorValue(expr);
    end_labels->Bind(builder());
    return true;
  } else if (!expr->ToBooleanIsTrue()) {
    TypeHint type_hint = VisitForAccumulatorValue(expr);
    builder()->JumpIfFalse(ToBooleanModeFromTypeHint(type_hint),
                           end_labels->New());
  }

  BuildIncrementBlockCoverageCounterIfEnabled(coverage_slot);

  return false;
}
4641 
// Emits a binary short-circuiting ||, either as jumps (test context) or as
// a value-producing expression.
void BytecodeGenerator::VisitLogicalOrExpression(BinaryOperation* binop) {
  Expression* left = binop->left();
  Expression* right = binop->right();

  int right_coverage_slot =
      AllocateBlockCoverageSlotIfEnabled(binop, SourceRangeKind::kRight);

  if (execution_result()->IsTest()) {
    TestResultScope* test_result = execution_result()->AsTest();
    if (left->ToBooleanIsTrue()) {
      // Statically truthy left operand decides the whole test.
      builder()->Jump(test_result->NewThenLabel());
    } else if (left->ToBooleanIsFalse() && right->ToBooleanIsFalse()) {
      // Both operands statically falsy: the test is statically false.
      BuildIncrementBlockCoverageCounterIfEnabled(right_coverage_slot);
      builder()->Jump(test_result->NewElseLabel());
    } else {
      VisitLogicalTest(Token::OR, left, right, right_coverage_slot);
    }
    test_result->SetResultConsumedByTest();
  } else {
    BytecodeLabels end_labels(zone());
    if (VisitLogicalOrSubExpression(left, &end_labels, right_coverage_slot)) {
      return;
    }
    VisitForAccumulatorValue(right);
    end_labels.Bind(builder());
  }
}
4669 
// Emits an n-ary short-circuiting ||, either as jumps (test context) or as
// a value-producing expression.
void BytecodeGenerator::VisitNaryLogicalOrExpression(NaryOperation* expr) {
  Expression* first = expr->first();
  DCHECK_GT(expr->subsequent_length(), 0);

  NaryCodeCoverageSlots coverage_slots(this, expr);

  if (execution_result()->IsTest()) {
    TestResultScope* test_result = execution_result()->AsTest();
    if (first->ToBooleanIsTrue()) {
      // Statically truthy first operand decides the whole test.
      builder()->Jump(test_result->NewThenLabel());
    } else {
      VisitNaryLogicalTest(Token::OR, expr, &coverage_slots);
    }
    test_result->SetResultConsumedByTest();
  } else {
    BytecodeLabels end_labels(zone());
    if (VisitLogicalOrSubExpression(first, &end_labels,
                                    coverage_slots.GetSlotFor(0))) {
      return;
    }
    for (size_t i = 0; i < expr->subsequent_length() - 1; ++i) {
      if (VisitLogicalOrSubExpression(expr->subsequent(i), &end_labels,
                                      coverage_slots.GetSlotFor(i + 1))) {
        return;
      }
    }
    // We have to visit the last value even if it's true, because we need its
    // actual value.
    VisitForAccumulatorValue(expr->subsequent(expr->subsequent_length() - 1));
    end_labels.Bind(builder());
  }
}
4702 
// Emits a binary short-circuiting &&, either as jumps (test context) or as
// a value-producing expression.
void BytecodeGenerator::VisitLogicalAndExpression(BinaryOperation* binop) {
  Expression* left = binop->left();
  Expression* right = binop->right();

  int right_coverage_slot =
      AllocateBlockCoverageSlotIfEnabled(binop, SourceRangeKind::kRight);

  if (execution_result()->IsTest()) {
    TestResultScope* test_result = execution_result()->AsTest();
    if (left->ToBooleanIsFalse()) {
      // Statically falsy left operand decides the whole test.
      builder()->Jump(test_result->NewElseLabel());
    } else if (left->ToBooleanIsTrue() && right->ToBooleanIsTrue()) {
      // Both operands statically truthy: the test is statically true.
      BuildIncrementBlockCoverageCounterIfEnabled(right_coverage_slot);
      builder()->Jump(test_result->NewThenLabel());
    } else {
      VisitLogicalTest(Token::AND, left, right, right_coverage_slot);
    }
    test_result->SetResultConsumedByTest();
  } else {
    BytecodeLabels end_labels(zone());
    if (VisitLogicalAndSubExpression(left, &end_labels, right_coverage_slot)) {
      return;
    }
    VisitForAccumulatorValue(right);
    end_labels.Bind(builder());
  }
}
4730 
// Emits an n-ary short-circuiting &&, either as jumps (test context) or as
// a value-producing expression.
void BytecodeGenerator::VisitNaryLogicalAndExpression(NaryOperation* expr) {
  Expression* first = expr->first();
  DCHECK_GT(expr->subsequent_length(), 0);

  NaryCodeCoverageSlots coverage_slots(this, expr);

  if (execution_result()->IsTest()) {
    TestResultScope* test_result = execution_result()->AsTest();
    if (first->ToBooleanIsFalse()) {
      // Statically falsy first operand decides the whole test.
      builder()->Jump(test_result->NewElseLabel());
    } else {
      VisitNaryLogicalTest(Token::AND, expr, &coverage_slots);
    }
    test_result->SetResultConsumedByTest();
  } else {
    BytecodeLabels end_labels(zone());
    if (VisitLogicalAndSubExpression(first, &end_labels,
                                     coverage_slots.GetSlotFor(0))) {
      return;
    }
    for (size_t i = 0; i < expr->subsequent_length() - 1; ++i) {
      if (VisitLogicalAndSubExpression(expr->subsequent(i), &end_labels,
                                       coverage_slots.GetSlotFor(i + 1))) {
        return;
      }
    }
    // We have to visit the last value even if it's false, because we need its
    // actual value.
    VisitForAccumulatorValue(expr->subsequent(expr->subsequent_length() - 1));
    end_labels.Bind(builder());
  }
}
4763 
4764 void BytecodeGenerator::VisitRewritableExpression(RewritableExpression* expr) {
4765  Visit(expr->expression());
4766 }
4767 
// Creates the function-level activation context appropriate for the closure
// scope: a script, module, eval or function context.
void BytecodeGenerator::BuildNewLocalActivationContext() {
  ValueResultScope value_execution_result(this);
  Scope* scope = closure_scope();
  DCHECK_EQ(current_scope(), closure_scope());

  // Create the appropriate context.
  if (scope->is_script_scope()) {
    Register scope_reg = register_allocator()->NewRegister();
    builder()
        ->LoadLiteral(scope)
        .StoreAccumulatorInRegister(scope_reg)
        .CallRuntime(Runtime::kNewScriptContext, scope_reg);
  } else if (scope->is_module_scope()) {
    // We don't need to do anything for the outer script scope.
    DCHECK(scope->outer_scope()->is_script_scope());

    // A JSFunction representing a module is called with the module object as
    // its sole argument.
    RegisterList args = register_allocator()->NewRegisterList(2);
    builder()
        ->MoveRegister(builder()->Parameter(0), args[0])
        .LoadLiteral(scope)
        .StoreAccumulatorInRegister(args[1])
        .CallRuntime(Runtime::kPushModuleContext, args);
  } else {
    DCHECK(scope->is_function_scope() || scope->is_eval_scope());
    int slot_count = scope->num_heap_slots() - Context::MIN_CONTEXT_SLOTS;
    // Small contexts use dedicated Create*Context bytecodes; larger ones
    // fall back to the NewFunctionContext runtime call.
    if (slot_count <= ConstructorBuiltins::MaximumFunctionContextSlots()) {
      switch (scope->scope_type()) {
        case EVAL_SCOPE:
          builder()->CreateEvalContext(scope, slot_count);
          break;
        case FUNCTION_SCOPE:
          builder()->CreateFunctionContext(scope, slot_count);
          break;
        default:
          UNREACHABLE();
      }
    } else {
      Register arg = register_allocator()->NewRegister();
      builder()->LoadLiteral(scope).StoreAccumulatorInRegister(arg).CallRuntime(
          Runtime::kNewFunctionContext, arg);
    }
  }
}
4813 
// Copies the receiver and any context-allocated parameters from their
// argument registers into the newly created activation context.
void BytecodeGenerator::BuildLocalActivationContextInitialization() {
  DeclarationScope* scope = closure_scope();

  if (scope->has_this_declaration() && scope->receiver()->IsContextSlot()) {
    Variable* variable = scope->receiver();
    Register receiver(builder()->Receiver());
    // Context variable (at bottom of the context chain).
    DCHECK_EQ(0, scope->ContextChainLength(variable->scope()));
    builder()->LoadAccumulatorWithRegister(receiver).StoreContextSlot(
        execution_context()->reg(), variable->index(), 0);
  }

  // Copy parameters into context if necessary.
  int num_parameters = scope->num_parameters();
  for (int i = 0; i < num_parameters; i++) {
    Variable* variable = scope->parameter(i);
    // Stack-allocated parameters stay in their registers.
    if (!variable->IsContextSlot()) continue;

    Register parameter(builder()->Parameter(i));
    // Context variable (at bottom of the context chain).
    DCHECK_EQ(0, scope->ContextChainLength(variable->scope()));
    builder()->LoadAccumulatorWithRegister(parameter).StoreContextSlot(
        execution_context()->reg(), variable->index(), 0);
  }
}
4839 
// Creates a context object for a block scope.
void BytecodeGenerator::BuildNewLocalBlockContext(Scope* scope) {
  ValueResultScope value_execution_result(this);
  DCHECK(scope->is_block_scope());

  builder()->CreateBlockContext(scope);
}
4846 
// Creates a with-statement context whose extension is the value currently
// in the accumulator, after converting it to an object.
void BytecodeGenerator::BuildNewLocalWithContext(Scope* scope) {
  ValueResultScope value_execution_result(this);

  Register extension_object = register_allocator()->NewRegister();

  builder()->ToObject(extension_object);
  builder()->CreateWithContext(extension_object, scope);
}
4855 
// Creates a catch-block context binding the caught exception, which is
// expected in the accumulator on entry.
void BytecodeGenerator::BuildNewLocalCatchContext(Scope* scope) {
  ValueResultScope value_execution_result(this);
  DCHECK(scope->catch_variable()->IsContextSlot());

  Register exception = register_allocator()->NewRegister();
  builder()->StoreAccumulatorInRegister(exception);
  builder()->CreateCatchContext(exception, scope);
}
4864 
4865 void BytecodeGenerator::VisitObjectLiteralAccessor(
4866  Register home_object, ObjectLiteralProperty* property, Register value_out) {
4867  if (property == nullptr) {
4868  builder()->LoadNull().StoreAccumulatorInRegister(value_out);
4869  } else {
4870  VisitForRegisterValue(property->value(), value_out);
4871  VisitSetHomeObject(value_out, home_object, property);
4872  }
4873 }
4874 
// If the property's value is a function literal that needs a [[HomeObject]],
// stores |home_object| as the home object of the function held in |value|.
void BytecodeGenerator::VisitSetHomeObject(Register value, Register home_object,
                                           LiteralProperty* property) {
  Expression* expr = property->value();
  if (FunctionLiteral::NeedsHomeObject(expr)) {
    FeedbackSlot slot = feedback_spec()->AddStoreICSlot(language_mode());
    builder()
        ->LoadAccumulatorWithRegister(home_object)
        .StoreHomeObjectProperty(value, feedback_index(slot), language_mode());
  }
}
4885 
// Creates the arguments object (if the function declares one) and assigns
// it to the {arguments} variable.
void BytecodeGenerator::VisitArgumentsObject(Variable* variable) {
  if (variable == nullptr) return;

  DCHECK(variable->IsContextSlot() || variable->IsStackAllocated());

  // Allocate and initialize a new arguments object and assign to the
  // {arguments} variable.
  builder()->CreateArguments(closure_scope()->GetArgumentsType());
  BuildVariableAssignment(variable, Token::ASSIGN, HoleCheckMode::kElided);
}
4896 
// Creates the rest-parameter array (if the function declares one) and
// assigns it to the {rest} variable.
void BytecodeGenerator::VisitRestArgumentsArray(Variable* rest) {
  if (rest == nullptr) return;

  // Allocate and initialize a new rest parameter and assign to the {rest}
  // variable.
  builder()->CreateArguments(CreateArgumentsType::kRestParameter);
  DCHECK(rest->IsContextSlot() || rest->IsStackAllocated());
  BuildVariableAssignment(rest, Token::ASSIGN, HoleCheckMode::kElided);
}
4906 
// Initializes |variable| (when present) with the current function closure.
void BytecodeGenerator::VisitThisFunctionVariable(Variable* variable) {
  if (variable == nullptr) return;

  // Store the closure we were called with in the given variable.
  builder()->LoadAccumulatorWithRegister(Register::function_closure());
  BuildVariableAssignment(variable, Token::INIT, HoleCheckMode::kElided);
}
4914 
// Initializes the new.target variable (when present) from the incoming
// new-target register, skipping resumable functions and locals the entry
// trampoline has already set up.
void BytecodeGenerator::VisitNewTargetVariable(Variable* variable) {
  if (variable == nullptr) return;

  // The generator resume trampoline abuses the new.target register
  // to pass in the generator object. In ordinary calls, new.target is always
  // undefined because generator functions are non-constructible, so don't
  // assign anything to the new.target variable.
  if (IsResumableFunction(info()->literal()->kind())) return;

  if (variable->location() == VariableLocation::LOCAL) {
    // The new.target register was already assigned by entry trampoline.
    DCHECK_EQ(incoming_new_target_or_generator_.index(),
              GetRegisterForLocalVariable(variable).index());
    return;
  }

  // Store the new target we were called with in the given variable.
  builder()->LoadAccumulatorWithRegister(incoming_new_target_or_generator_);
  BuildVariableAssignment(variable, Token::INIT, HoleCheckMode::kElided);
}
4935 
// Creates the generator object for a resumable function and stores it both
// in the dedicated generator register and, when the variable does not alias
// that register, in the scope's generator_object_var.
void BytecodeGenerator::BuildGeneratorObjectVariableInitialization() {
  DCHECK(IsResumableFunction(info()->literal()->kind()));

  Variable* generator_object_var = closure_scope()->generator_object_var();
  RegisterAllocationScope register_scope(this);
  RegisterList args = register_allocator()->NewRegisterList(2);
  // Plain async functions use a dedicated entry runtime call; generator
  // functions (including async generators) use the generic generator-object
  // creation runtime call.
  Runtime::FunctionId function_id =
      (IsAsyncFunction(info()->literal()->kind()) &&
       !IsAsyncGeneratorFunction(info()->literal()->kind()))
          ? Runtime::kInlineAsyncFunctionEnter
          : Runtime::kInlineCreateJSGeneratorObject;
  // Both runtime functions take (closure, receiver) and return the new
  // generator object, which is stashed in the generator object register.
  builder()
      ->MoveRegister(Register::function_closure(), args[0])
      .MoveRegister(builder()->Receiver(), args[1])
      .CallRuntime(function_id, args)
      .StoreAccumulatorInRegister(generator_object());

  if (generator_object_var->location() == VariableLocation::LOCAL) {
    // The generator object register is already set to the variable's local
    // register.
    DCHECK_EQ(generator_object().index(),
              GetRegisterForLocalVariable(generator_object_var).index());
  } else {
    BuildVariableAssignment(generator_object_var, Token::INIT,
                            HoleCheckMode::kElided);
  }
}
4963 
4964 void BytecodeGenerator::BuildPushUndefinedIntoRegisterList(
4965  RegisterList* reg_list) {
4966  Register reg = register_allocator()->GrowRegisterList(reg_list);
4967  builder()->LoadUndefined().StoreAccumulatorInRegister(reg);
4968 }
4969 
4970 void BytecodeGenerator::BuildLoadPropertyKey(LiteralProperty* property,
4971  Register out_reg) {
4972  if (property->key()->IsStringLiteral()) {
4973  VisitForRegisterValue(property->key(), out_reg);
4974  } else {
4975  VisitForAccumulatorValue(property->key());
4976  builder()->ToName(out_reg);
4977  }
4978 }
4979 
4980 int BytecodeGenerator::AllocateBlockCoverageSlotIfEnabled(
4981  AstNode* node, SourceRangeKind kind) {
4982  return (block_coverage_builder_ == nullptr)
4983  ? BlockCoverageBuilder::kNoCoverageArraySlot
4984  : block_coverage_builder_->AllocateBlockCoverageSlot(node, kind);
4985 }
4986 
4987 int BytecodeGenerator::AllocateNaryBlockCoverageSlotIfEnabled(
4988  NaryOperation* node, size_t index) {
4989  return (block_coverage_builder_ == nullptr)
4990  ? BlockCoverageBuilder::kNoCoverageArraySlot
4991  : block_coverage_builder_->AllocateNaryBlockCoverageSlot(node,
4992  index);
4993 }
4994 
4995 void BytecodeGenerator::BuildIncrementBlockCoverageCounterIfEnabled(
4996  AstNode* node, SourceRangeKind kind) {
4997  if (block_coverage_builder_ == nullptr) return;
4998  block_coverage_builder_->IncrementBlockCounter(node, kind);
4999 }
5000 
5001 void BytecodeGenerator::BuildIncrementBlockCoverageCounterIfEnabled(
5002  int coverage_array_slot) {
5003  if (block_coverage_builder_ != nullptr) {
5004  block_coverage_builder_->IncrementBlockCounter(coverage_array_slot);
5005  }
5006 }
5007 
5008 // Visits the expression |expr| and places the result in the accumulator.
5009 BytecodeGenerator::TypeHint BytecodeGenerator::VisitForAccumulatorValue(
5010  Expression* expr) {
5011  ValueResultScope accumulator_scope(this);
5012  Visit(expr);
5013  return accumulator_scope.type_hint();
5014 }
5015 
5016 void BytecodeGenerator::VisitForAccumulatorValueOrTheHole(Expression* expr) {
5017  if (expr == nullptr) {
5018  builder()->LoadTheHole();
5019  } else {
5020  VisitForAccumulatorValue(expr);
5021  }
5022 }
5023 
5024 // Visits the expression |expr| and discards the result.
5025 void BytecodeGenerator::VisitForEffect(Expression* expr) {
5026  EffectResultScope effect_scope(this);
5027  Visit(expr);
5028 }
5029 
5030 // Visits the expression |expr| and returns the register containing
5031 // the expression result.
5032 Register BytecodeGenerator::VisitForRegisterValue(Expression* expr) {
5033  VisitForAccumulatorValue(expr);
5034  Register result = register_allocator()->NewRegister();
5035  builder()->StoreAccumulatorInRegister(result);
5036  return result;
5037 }
5038 
5039 // Visits the expression |expr| and stores the expression result in
5040 // |destination|.
5041 void BytecodeGenerator::VisitForRegisterValue(Expression* expr,
5042  Register destination) {
5043  ValueResultScope register_scope(this);
5044  Visit(expr);
5045  builder()->StoreAccumulatorInRegister(destination);
5046 }
5047 
// Visits the expression |expr| and pushes the result into a new register
// added to the end of |reg_list|.
void BytecodeGenerator::VisitAndPushIntoRegisterList(Expression* expr,
                                                     RegisterList* reg_list) {
  {
    // The result scope is closed before the list grows so its temporaries
    // are released first.
    ValueResultScope register_scope(this);
    Visit(expr);
  }
  // Grow the register list after visiting the expression to avoid reserving
  // the register across the expression evaluation, which could cause memory
  // leaks for deep expressions due to dead objects being kept alive by pointers
  // in registers.
  Register destination = register_allocator()->GrowRegisterList(reg_list);
  builder()->StoreAccumulatorInRegister(destination);
}
5063 
5064 void BytecodeGenerator::BuildTest(ToBooleanMode mode,
5065  BytecodeLabels* then_labels,
5066  BytecodeLabels* else_labels,
5067  TestFallthrough fallthrough) {
5068  switch (fallthrough) {
5069  case TestFallthrough::kThen:
5070  builder()->JumpIfFalse(mode, else_labels->New());
5071  break;
5072  case TestFallthrough::kElse:
5073  builder()->JumpIfTrue(mode, then_labels->New());
5074  break;
5075  case TestFallthrough::kNone:
5076  builder()->JumpIfTrue(mode, then_labels->New());
5077  builder()->Jump(else_labels->New());
5078  break;
5079  }
5080 }
5081 
// Visits the expression |expr| for testing its boolean value and jumping to
// the |then| or |else| labels depending on value and short-circuit
// semantics. The visited expression may consume the test itself (emitting
// its own jumps), in which case no extra test is emitted here.
void BytecodeGenerator::VisitForTest(Expression* expr,
                                     BytecodeLabels* then_labels,
                                     BytecodeLabels* else_labels,
                                     TestFallthrough fallthrough) {
  bool result_consumed;
  TypeHint type_hint;
  {
    // To make sure that all temporary registers are returned before generating
    // jumps below, we ensure that the result scope is deleted before doing so.
    // Dead registers might be materialized otherwise.
    TestResultScope test_result(this, then_labels, else_labels, fallthrough);
    Visit(expr);
    result_consumed = test_result.result_consumed_by_test();
    type_hint = test_result.type_hint();
    // Labels and fallthrough might have been mutated, so update based on
    // TestResultScope.
    then_labels = test_result.then_labels();
    else_labels = test_result.else_labels();
    fallthrough = test_result.fallthrough();
  }
  // If the expression did not emit its own jumps, test the accumulator value
  // using the type hint gathered during the visit.
  if (!result_consumed) {
    BuildTest(ToBooleanModeFromTypeHint(type_hint), then_labels, else_labels,
              fallthrough);
  }
}
5109 
// Visits |expr| inside the current (already established) test execution
// scope, emitting the test jumps afterwards if the expression did not
// consume the test itself.
void BytecodeGenerator::VisitInSameTestExecutionScope(Expression* expr) {
  DCHECK(execution_result()->IsTest());
  {
    // Temporary registers are released before jumps are emitted below.
    RegisterAllocationScope reg_scope(this);
    Visit(expr);
  }
  if (!execution_result()->AsTest()->result_consumed_by_test()) {
    TestResultScope* result_scope = execution_result()->AsTest();
    BuildTest(ToBooleanModeFromTypeHint(result_scope->type_hint()),
              result_scope->then_labels(), result_scope->else_labels(),
              result_scope->fallthrough());
    // Mark the test consumed so it is not emitted a second time.
    result_scope->SetResultConsumedByTest();
  }
}
5124 
// Visits |stmt| with both the current scope and the execution context
// switched to |scope|. Only valid for scopes without declarations, since no
// declarations are processed here.
void BytecodeGenerator::VisitInScope(Statement* stmt, Scope* scope) {
  DCHECK(scope->declarations()->is_empty());
  CurrentScope current_scope(this, scope);
  ContextScope context_scope(this, scope);
  Visit(stmt);
}
5131 
5132 Register BytecodeGenerator::GetRegisterForLocalVariable(Variable* variable) {
5133  DCHECK_EQ(VariableLocation::LOCAL, variable->location());
5134  return builder()->Local(variable->index());
5135 }
5136 
// Returns the FunctionKind of the function literal being compiled.
FunctionKind BytecodeGenerator::function_kind() const {
  return info()->literal()->kind();
}
5140 
// Returns the language mode of the scope currently being visited (the
// innermost scope, not necessarily the closure scope).
LanguageMode BytecodeGenerator::language_mode() const {
  return current_scope()->language_mode();
}
5144 
// Returns the register holding the generator object. Only valid for
// resumable functions, where it aliases the incoming new.target register.
Register BytecodeGenerator::generator_object() const {
  DCHECK(IsResumableFunction(info()->literal()->kind()));
  return incoming_new_target_or_generator_;
}
5149 
// Returns the feedback vector spec for this function, to which IC and
// closure slots are added during bytecode generation.
FeedbackVectorSpec* BytecodeGenerator::feedback_spec() {
  return info()->feedback_vector_spec();
}
5153 
5154 int BytecodeGenerator::feedback_index(FeedbackSlot slot) const {
5155  DCHECK(!slot.IsInvalid());
5156  return FeedbackVector::GetIndex(slot);
5157 }
5158 
5159 FeedbackSlot BytecodeGenerator::GetCachedLoadGlobalICSlot(
5160  TypeofMode typeof_mode, Variable* variable) {
5161  FeedbackSlotKind slot_kind =
5162  typeof_mode == INSIDE_TYPEOF
5163  ? FeedbackSlotKind::kLoadGlobalInsideTypeof
5164  : FeedbackSlotKind::kLoadGlobalNotInsideTypeof;
5165  FeedbackSlot slot = feedback_slot_cache()->Get(slot_kind, variable);
5166  if (!slot.IsInvalid()) {
5167  return slot;
5168  }
5169  slot = feedback_spec()->AddLoadGlobalICSlot(typeof_mode);
5170  feedback_slot_cache()->Put(slot_kind, variable, slot);
5171  return slot;
5172 }
5173 
5174 FeedbackSlot BytecodeGenerator::GetCachedStoreGlobalICSlot(
5175  LanguageMode language_mode, Variable* variable) {
5176  FeedbackSlotKind slot_kind = is_strict(language_mode)
5177  ? FeedbackSlotKind::kStoreGlobalStrict
5178  : FeedbackSlotKind::kStoreGlobalSloppy;
5179  FeedbackSlot slot = feedback_slot_cache()->Get(slot_kind, variable);
5180  if (!slot.IsInvalid()) {
5181  return slot;
5182  }
5183  slot = feedback_spec()->AddStoreGlobalICSlot(language_mode);
5184  feedback_slot_cache()->Put(slot_kind, variable, slot);
5185  return slot;
5186 }
5187 
5188 FeedbackSlot BytecodeGenerator::GetCachedLoadICSlot(const Expression* expr,
5189  const AstRawString* name) {
5190  if (!FLAG_ignition_share_named_property_feedback) {
5191  return feedback_spec()->AddLoadICSlot();
5192  }
5193  FeedbackSlotKind slot_kind = FeedbackSlotKind::kLoadProperty;
5194  if (!expr->IsVariableProxy()) {
5195  return feedback_spec()->AddLoadICSlot();
5196  }
5197  const VariableProxy* proxy = expr->AsVariableProxy();
5198  FeedbackSlot slot =
5199  feedback_slot_cache()->Get(slot_kind, proxy->var()->index(), name);
5200  if (!slot.IsInvalid()) {
5201  return slot;
5202  }
5203  slot = feedback_spec()->AddLoadICSlot();
5204  feedback_slot_cache()->Put(slot_kind, proxy->var()->index(), name, slot);
5205  return slot;
5206 }
5207 
5208 FeedbackSlot BytecodeGenerator::GetCachedStoreICSlot(const Expression* expr,
5209  const AstRawString* name) {
5210  if (!FLAG_ignition_share_named_property_feedback) {
5211  return feedback_spec()->AddStoreICSlot(language_mode());
5212  }
5213  FeedbackSlotKind slot_kind = is_strict(language_mode())
5214  ? FeedbackSlotKind::kStoreNamedStrict
5215  : FeedbackSlotKind::kStoreNamedSloppy;
5216  if (!expr->IsVariableProxy()) {
5217  return feedback_spec()->AddStoreICSlot(language_mode());
5218  }
5219  const VariableProxy* proxy = expr->AsVariableProxy();
5220  FeedbackSlot slot =
5221  feedback_slot_cache()->Get(slot_kind, proxy->var()->index(), name);
5222  if (!slot.IsInvalid()) {
5223  return slot;
5224  }
5225  slot = feedback_spec()->AddStoreICSlot(language_mode());
5226  feedback_slot_cache()->Put(slot_kind, proxy->var()->index(), name, slot);
5227  return slot;
5228 }
5229 
5230 FeedbackSlot BytecodeGenerator::GetCachedCreateClosureSlot(
5231  FunctionLiteral* literal) {
5232  FeedbackSlotKind slot_kind = FeedbackSlotKind::kCreateClosure;
5233  FeedbackSlot slot = feedback_slot_cache()->Get(slot_kind, literal);
5234  if (!slot.IsInvalid()) {
5235  return slot;
5236  }
5237  slot = feedback_spec()->AddCreateClosureSlot();
5238  feedback_slot_cache()->Put(slot_kind, literal, slot);
5239  return slot;
5240 }
5241 
// Returns the shared dummy compare-IC feedback slot, cached in
// dummy_feedback_slot_ so that at most one such slot exists per function.
FeedbackSlot BytecodeGenerator::GetDummyCompareICSlot() {
  return dummy_feedback_slot_.Get();
}
5245 
5246 Runtime::FunctionId BytecodeGenerator::StoreToSuperRuntimeId() {
5247  return is_strict(language_mode()) ? Runtime::kStoreToSuper_Strict
5248  : Runtime::kStoreToSuper_Sloppy;
5249 }
5250 
5251 Runtime::FunctionId BytecodeGenerator::StoreKeyedToSuperRuntimeId() {
5252  return is_strict(language_mode()) ? Runtime::kStoreKeyedToSuper_Strict
5253  : Runtime::kStoreKeyedToSuper_Sloppy;
5254 }
5255 
5256 } // namespace interpreter
5257 } // namespace internal
5258 } // namespace v8
V8_INLINE bool IsEmpty() const
Definition: v8.h:195
Definition: v8.h:85
Definition: libplatform.h:13