V8 API Reference, 7.2.502.16 (for Deno 0.2.4)
code-generator.h
// Copyright 2014 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef V8_COMPILER_BACKEND_CODE_GENERATOR_H_
#define V8_COMPILER_BACKEND_CODE_GENERATOR_H_

#include "src/base/optional.h"
#include "src/compiler/backend/gap-resolver.h"
#include "src/compiler/backend/instruction.h"
#include "src/compiler/backend/unwinding-info-writer.h"
#include "src/compiler/osr.h"
#include "src/deoptimizer.h"
#include "src/macro-assembler.h"
#include "src/safepoint-table.h"
#include "src/source-position-table.h"
#include "src/trap-handler/trap-handler.h"

namespace v8 {
namespace internal {

class OptimizedCompilationInfo;

namespace compiler {

// Forward declarations.
class DeoptimizationExit;
class FrameAccessState;
class Linkage;
class OutOfLineCode;

struct BranchInfo {
  FlagsCondition condition;
  Label* true_label;
  Label* false_label;
  bool fallthru;
};

class InstructionOperandIterator {
 public:
  InstructionOperandIterator(Instruction* instr, size_t pos)
      : instr_(instr), pos_(pos) {}

  Instruction* instruction() const { return instr_; }
  InstructionOperand* Advance() { return instr_->InputAt(pos_++); }

 private:
  Instruction* instr_;
  size_t pos_;
};
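
// A minimal usage sketch (illustrative only, not part of this header): the
// iterator walks an instruction's input operands starting at position {pos},
// one operand per Advance() call. {instr} is an assumed Instruction*.
//
//   InstructionOperandIterator it(instr, 0);
//   InstructionOperand* in0 = it.Advance();  // input operand 0
//   InstructionOperand* in1 = it.Advance();  // input operand 1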

enum class DeoptimizationLiteralKind { kObject, kNumber, kString };

// Either a non-null Handle<Object>, a double or a StringConstantBase.
class DeoptimizationLiteral {
 public:
  DeoptimizationLiteral() : object_(), number_(0), string_(nullptr) {}
  explicit DeoptimizationLiteral(Handle<Object> object)
      : kind_(DeoptimizationLiteralKind::kObject), object_(object) {
    DCHECK(!object_.is_null());
  }
  explicit DeoptimizationLiteral(double number)
      : kind_(DeoptimizationLiteralKind::kNumber), number_(number) {}
  explicit DeoptimizationLiteral(const StringConstantBase* string)
      : kind_(DeoptimizationLiteralKind::kString), string_(string) {}

  Handle<Object> object() const { return object_; }
  const StringConstantBase* string() const { return string_; }

  bool operator==(const DeoptimizationLiteral& other) const {
    return kind_ == other.kind_ && object_.equals(other.object_) &&
           bit_cast<uint64_t>(number_) == bit_cast<uint64_t>(other.number_) &&
           bit_cast<intptr_t>(string_) == bit_cast<intptr_t>(other.string_);
  }

  Handle<Object> Reify(Isolate* isolate) const;

  DeoptimizationLiteralKind kind() const { return kind_; }

 private:
  DeoptimizationLiteralKind kind_;

  Handle<Object> object_;
  double number_ = 0;
  const StringConstantBase* string_ = nullptr;
};
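
// An illustrative sketch (assuming a live Isolate* {isolate} and a non-null
// Handle<Object> {obj}): each constructor tags the literal with its kind,
// and Reify() later materializes the literal as a heap object.
//
//   DeoptimizationLiteral num(0.5);  // kind() == kNumber
//   DeoptimizationLiteral ref(obj);  // kind() == kObject, DCHECKed non-null
//   Handle<Object> value = num.Reify(isolate);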

// Generates native code for a sequence of instructions.
class CodeGenerator final : public GapResolver::Assembler {
 public:
  explicit CodeGenerator(Zone* codegen_zone, Frame* frame, Linkage* linkage,
                         InstructionSequence* code,
                         OptimizedCompilationInfo* info, Isolate* isolate,
                         base::Optional<OsrHelper> osr_helper,
                         int start_source_position,
                         JumpOptimizationInfo* jump_opt,
                         PoisoningMitigationLevel poisoning_level,
                         const AssemblerOptions& options,
                         int32_t builtin_index);

  // Generate native code. After calling AssembleCode, call FinalizeCode to
  // produce the actual code object. If an error occurs during either phase,
  // FinalizeCode returns an empty MaybeHandle.
  void AssembleCode();  // Does not need to run on main thread.
  MaybeHandle<Code> FinalizeCode();
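
  // A minimal usage sketch (an assumption, not part of this header): with a
  // fully constructed CodeGenerator {gen}, code generation runs in two
  // phases, and failure in either phase surfaces as an empty MaybeHandle:
  //
  //   gen.AssembleCode();  // may run off the main thread
  //   Handle<Code> code;
  //   if (!gen.FinalizeCode().ToHandle(&code)) {
  //     // bail out: code generation failed
  //   }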

  OwnedVector<byte> GetSourcePositionTable();
  OwnedVector<trap_handler::ProtectedInstructionData>
      GetProtectedInstructions();

  InstructionSequence* code() const { return code_; }
  FrameAccessState* frame_access_state() const { return frame_access_state_; }
  const Frame* frame() const { return frame_access_state_->frame(); }
  Isolate* isolate() const { return isolate_; }
  Linkage* linkage() const { return linkage_; }

  Label* GetLabel(RpoNumber rpo) { return &labels_[rpo.ToSize()]; }

  void AddProtectedInstructionLanding(uint32_t instr_offset,
                                      uint32_t landing_offset);

  bool wasm_runtime_exception_support() const;

  SourcePosition start_source_position() const {
    return start_source_position_;
  }

  void AssembleSourcePosition(Instruction* instr);
  void AssembleSourcePosition(SourcePosition source_position);

  // Record a safepoint with the given pointer map.
  void RecordSafepoint(ReferenceMap* references, Safepoint::Kind kind,
                       int arguments, Safepoint::DeoptMode deopt_mode);

  Zone* zone() const { return zone_; }
  TurboAssembler* tasm() { return &tasm_; }
  size_t GetSafepointTableOffset() const { return safepoints_.GetCodeOffset(); }
  size_t GetHandlerTableOffset() const { return handler_table_offset_; }

  const ZoneVector<int>& block_starts() const { return block_starts_; }
  const ZoneVector<int>& instr_starts() const { return instr_starts_; }

  static constexpr int kBinarySearchSwitchMinimalCases = 4;

 private:
  GapResolver* resolver() { return &resolver_; }
  SafepointTableBuilder* safepoints() { return &safepoints_; }
  OptimizedCompilationInfo* info() const { return info_; }
  OsrHelper* osr_helper() { return &(*osr_helper_); }

  // Create the FrameAccessState object. The Frame is immutable from here on.
  void CreateFrameAccessState(Frame* frame);

  // Architecture-specific frame finalization.
  void FinishFrame(Frame* frame);

  // Checks if {block} will appear directly after {current_block_} when
  // assembling code, in which case a fall-through can be used.
  bool IsNextInAssemblyOrder(RpoNumber block) const;

  // Check if a heap object can be materialized by loading from a heap root,
  // which is cheaper on some platforms than materializing the actual heap
  // object constant.
  bool IsMaterializableFromRoot(Handle<HeapObject> object,
                                RootIndex* index_return);

  enum CodeGenResult { kSuccess, kTooManyDeoptimizationBailouts };

  // Assemble instructions for the specified block.
  CodeGenResult AssembleBlock(const InstructionBlock* block);

  // Inserts a mask update at the beginning of an instruction block if the
  // predecessor block ends with a masking branch.
  void TryInsertBranchPoisoning(const InstructionBlock* block);

  // Initializes the masking register in the prologue of a function.
  void InitializeSpeculationPoison();
  // Resets the masking register during execution of a function.
  void ResetSpeculationPoison();
  // Generates a mask from the pc passed in {kJavaScriptCallCodeStartRegister}.
  void GenerateSpeculationPoisonFromCodeStartRegister();

  // Assemble code for the specified instruction.
  CodeGenResult AssembleInstruction(Instruction* instr,
                                    const InstructionBlock* block);
  void AssembleGaps(Instruction* instr);

  // Compute branch info from the given instruction. Returns a valid rpo
  // number if the branch is redundant; the returned rpo number points to the
  // target basic block.
  RpoNumber ComputeBranchInfo(BranchInfo* branch, Instruction* instr);

  // Returns true if an instruction is a tail call that needs to adjust the
  // stack pointer before execution. The stack slot index to the empty slot
  // above the adjusted stack pointer is returned in |slot|.
  bool GetSlotAboveSPBeforeTailCall(Instruction* instr, int* slot);

  // Determines how to call helper stubs depending on the code kind.
  StubCallMode DetermineStubCallMode() const;

  CodeGenResult AssembleDeoptimizerCall(int deoptimization_id,
                                        SourcePosition pos);

  // ===========================================================================
  // ============= Architecture-specific code generation methods. =============
  // ===========================================================================

  CodeGenResult AssembleArchInstruction(Instruction* instr);
  void AssembleArchJump(RpoNumber target);
  void AssembleArchBranch(Instruction* instr, BranchInfo* branch);

  // Generates a special branch for a deoptimization condition.
  void AssembleArchDeoptBranch(Instruction* instr, BranchInfo* branch);

  void AssembleArchBoolean(Instruction* instr, FlagsCondition condition);
  void AssembleArchTrap(Instruction* instr, FlagsCondition condition);
  void AssembleArchBinarySearchSwitchRange(Register input, RpoNumber def_block,
                                           std::pair<int32_t, Label*>* begin,
                                           std::pair<int32_t, Label*>* end);
  void AssembleArchBinarySearchSwitch(Instruction* instr);
  void AssembleArchLookupSwitch(Instruction* instr);
  void AssembleArchTableSwitch(Instruction* instr);
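
  // A sketch of the binary-search lowering above (illustrative only, not the
  // exact emitted sequence): {begin}..{end} is a sorted run of (case value,
  // label) pairs. Runs shorter than kBinarySearchSwitchMinimalCases are
  // emitted as a linear compare-and-branch chain; longer runs split at the
  // midpoint and recurse into each half:
  //
  //   if (end - begin < kBinarySearchSwitchMinimalCases) {
  //     // for each pair: compare {input} with pair->first, jump if equal
  //   } else {
  //     auto* mid = begin + (end - begin) / 2;
  //     // branch on {input} < mid->first, then recurse on the two halves
  //   }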

  // Generates code that checks whether the {kJavaScriptCallCodeStartRegister}
  // contains the expected pointer to the start of the instruction stream.
  void AssembleCodeStartRegisterCheck();

  void AssembleBranchPoisoning(FlagsCondition condition, Instruction* instr);

  // When entering code that is marked for deoptimization, rather than
  // continuing with its execution, we jump to lazily compiled code. We need
  // to do this because this code has already been deoptimized and needs to be
  // unlinked from the JS functions referring to it.
  void BailoutIfDeoptimized();

  // Generates code to poison the stack pointer and implicit register arguments
  // like the context register and the function register.
  void AssembleRegisterArgumentPoisoning();

  // Generates an architecture-specific, descriptor-specific prologue
  // to set up a stack frame.
  void AssembleConstructFrame();

  // Generates an architecture-specific, descriptor-specific return sequence
  // to tear down a stack frame.
  void AssembleReturn(InstructionOperand* pop);

  void AssembleDeconstructFrame();

  // Generates code to manipulate the stack in preparation for a tail call.
  void AssemblePrepareTailCall();

  // Generates code to pop the current frame if it is an arguments adaptor
  // frame.
  void AssemblePopArgumentsAdaptorFrame(Register args_reg, Register scratch1,
                                        Register scratch2, Register scratch3);

  enum PushTypeFlag {
    kImmediatePush = 0x1,
    kRegisterPush = 0x2,
    kStackSlotPush = 0x4,
    kScalarPush = kRegisterPush | kStackSlotPush
  };

  typedef base::Flags<PushTypeFlag> PushTypeFlags;

  static bool IsValidPush(InstructionOperand source, PushTypeFlags push_type);

  // Generate a list of moves from an instruction that are candidates to be
  // turned into push instructions on platforms that support them. In general,
  // the list of push candidates are moves to a set of contiguous destination
  // InstructionOperand locations on the stack that don't clobber values that
  // are needed to resolve the gap or use values generated by the gap,
  // i.e. moves that can be hoisted together before the actual gap and
  // assembled together.
  static void GetPushCompatibleMoves(Instruction* instr,
                                     PushTypeFlags push_type,
                                     ZoneVector<MoveOperands*>* pushes);

  class MoveType {
   public:
    enum Type {
      kRegisterToRegister,
      kRegisterToStack,
      kStackToRegister,
      kStackToStack,
      kConstantToRegister,
      kConstantToStack
    };

    // Detect what type of move or swap needs to be performed. Note that these
    // functions do not take into account the representation (Tagged, FP,
    // ...etc).

    static Type InferMove(InstructionOperand* source,
                          InstructionOperand* destination);
    static Type InferSwap(InstructionOperand* source,
                          InstructionOperand* destination);
  };
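
  // An illustrative sketch ({source} and {destination} are assumptions):
  // InferMove classifies a move by where its endpoints live, which the
  // architecture-specific AssembleMove implementations can use to pick an
  // instruction sequence:
  //
  //   switch (MoveType::InferMove(source, destination)) {
  //     case MoveType::kRegisterToRegister:  // emit a plain register move
  //       break;
  //     case MoveType::kConstantToStack:     // materialize, then store
  //       break;
  //     // ... remaining cases
  //   }
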
  // Called before a tail call |instr|'s gap moves are assembled and allows
  // gap-specific pre-processing, e.g. adjustment of the sp for tail calls that
  // need it before gap moves or conversion of certain gap moves into pushes.
  void AssembleTailCallBeforeGap(Instruction* instr,
                                 int first_unused_stack_slot);
  // Called after a tail call |instr|'s gap moves are assembled and allows
  // gap-specific post-processing, e.g. adjustment of the sp for tail calls
  // that need it after gap moves.
  void AssembleTailCallAfterGap(Instruction* instr,
                                int first_unused_stack_slot);

  void FinishCode();

  // ===========================================================================
  // ============== Architecture-specific gap resolver methods. ===============
  // ===========================================================================

  // Interface used by the gap resolver to emit moves and swaps.
  void AssembleMove(InstructionOperand* source,
                    InstructionOperand* destination) final;
  void AssembleSwap(InstructionOperand* source,
                    InstructionOperand* destination) final;

  // ===========================================================================
  // =================== Jump table construction methods. =====================
  // ===========================================================================

  class JumpTable;
  // Adds a jump table that is emitted after the actual code. Returns a label
  // pointing to the beginning of the table. {targets} is assumed to be static
  // or zone allocated.
  Label* AddJumpTable(Label** targets, size_t target_count);
  // Emits a jump table.
  void AssembleJumpTable(Label** targets, size_t target_count);

  // ===========================================================================
  // ================== Deoptimization table construction. ====================
  // ===========================================================================

  void RecordCallPosition(Instruction* instr);
  Handle<DeoptimizationData> GenerateDeoptimizationData();
  int DefineDeoptimizationLiteral(DeoptimizationLiteral literal);
  DeoptimizationEntry const& GetDeoptimizationEntry(Instruction* instr,
                                                    size_t frame_state_offset);
  DeoptimizeKind GetDeoptimizationKind(int deoptimization_id) const;
  DeoptimizeReason GetDeoptimizationReason(int deoptimization_id) const;
  int BuildTranslation(Instruction* instr, int pc_offset,
                       size_t frame_state_offset,
                       OutputFrameStateCombine state_combine);
  void BuildTranslationForFrameStateDescriptor(
      FrameStateDescriptor* descriptor, InstructionOperandIterator* iter,
      Translation* translation, OutputFrameStateCombine state_combine);
  void TranslateStateValueDescriptor(StateValueDescriptor* desc,
                                     StateValueList* nested,
                                     Translation* translation,
                                     InstructionOperandIterator* iter);
  void TranslateFrameStateDescriptorOperands(FrameStateDescriptor* desc,
                                             InstructionOperandIterator* iter,
                                             Translation* translation);
  void AddTranslationForOperand(Translation* translation, Instruction* instr,
                                InstructionOperand* op, MachineType type);
  void MarkLazyDeoptSite();

  DeoptimizationExit* AddDeoptimizationExit(Instruction* instr,
                                            size_t frame_state_offset);

  // ===========================================================================

  class DeoptimizationState final : public ZoneObject {
   public:
    DeoptimizationState(BailoutId bailout_id, int translation_id,
                        int pc_offset, DeoptimizeKind kind,
                        DeoptimizeReason reason)
        : bailout_id_(bailout_id),
          translation_id_(translation_id),
          pc_offset_(pc_offset),
          kind_(kind),
          reason_(reason) {}

    BailoutId bailout_id() const { return bailout_id_; }
    int translation_id() const { return translation_id_; }
    int pc_offset() const { return pc_offset_; }
    DeoptimizeKind kind() const { return kind_; }
    DeoptimizeReason reason() const { return reason_; }

   private:
    BailoutId bailout_id_;
    int translation_id_;
    int pc_offset_;
    DeoptimizeKind kind_;
    DeoptimizeReason reason_;
  };

  struct HandlerInfo {
    Label* handler;
    int pc_offset;
  };

  friend class OutOfLineCode;
  friend class CodeGeneratorTester;

  Zone* zone_;
  Isolate* isolate_;
  FrameAccessState* frame_access_state_;
  Linkage* const linkage_;
  InstructionSequence* const code_;
  UnwindingInfoWriter unwinding_info_writer_;
  OptimizedCompilationInfo* const info_;
  Label* const labels_;
  Label return_label_;
  RpoNumber current_block_;
  SourcePosition start_source_position_;
  SourcePosition current_source_position_;
  TurboAssembler tasm_;
  GapResolver resolver_;
  SafepointTableBuilder safepoints_;
  ZoneVector<HandlerInfo> handlers_;
  ZoneDeque<DeoptimizationExit*> deoptimization_exits_;
  ZoneDeque<DeoptimizationState*> deoptimization_states_;
  ZoneDeque<DeoptimizationLiteral> deoptimization_literals_;
  size_t inlined_function_count_;
  TranslationBuffer translations_;
  int handler_table_offset_;
  int last_lazy_deopt_pc_;

  // kArchCallCFunction could be reached either as:
  //   kArchCallCFunction;
  // or:
  //   kArchSaveCallerRegisters;
  //   kArchCallCFunction;
  //   kArchRestoreCallerRegisters;
  // The boolean is used to distinguish the two cases. In the latter case, we
  // also need to decide if FP registers need to be saved, which is controlled
  // by fp_mode_.
  bool caller_registers_saved_;
  SaveFPRegsMode fp_mode_;

  JumpTable* jump_tables_;
  OutOfLineCode* ools_;
  base::Optional<OsrHelper> osr_helper_;
  int osr_pc_offset_;
  int optimized_out_literal_id_;
  SourcePositionTableBuilder source_position_table_builder_;
  ZoneVector<trap_handler::ProtectedInstructionData> protected_instructions_;
  CodeGenResult result_;
  PoisoningMitigationLevel poisoning_level_;
  ZoneVector<int> block_starts_;
  ZoneVector<int> instr_starts_;
};

}  // namespace compiler
}  // namespace internal
}  // namespace v8

#endif  // V8_COMPILER_BACKEND_CODE_GENERATOR_H_