// Extracted from: V8 API Reference, 7.2.502.16 (for Deno 0.2.4) — code.h
1 // Copyright 2017 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
4 
5 #ifndef V8_OBJECTS_CODE_H_
6 #define V8_OBJECTS_CODE_H_
7 
8 #include "src/contexts.h"
9 #include "src/handler-table.h"
10 #include "src/objects.h"
11 #include "src/objects/fixed-array.h"
12 #include "src/objects/heap-object.h"
13 
14 // Has to be the last include (doesn't have include guards):
15 #include "src/objects/object-macros.h"
16 
17 namespace v8 {
18 namespace internal {
19 
20 class ByteArray;
21 class BytecodeArray;
22 class CodeDataContainer;
23 class MaybeObject;
24 
25 namespace interpreter {
26 class Register;
27 }
28 
29 // Code describes objects with on-the-fly generated machine code.
30 class Code : public HeapObjectPtr {
31  public:
32  NEVER_READ_ONLY_SPACE
33  // Opaque data type for encapsulating code flags like kind, inline
34  // cache state, and arguments count.
35  typedef uint32_t Flags;
36 
37 #define CODE_KIND_LIST(V) \
38  V(OPTIMIZED_FUNCTION) \
39  V(BYTECODE_HANDLER) \
40  V(STUB) \
41  V(BUILTIN) \
42  V(REGEXP) \
43  V(WASM_FUNCTION) \
44  V(WASM_TO_JS_FUNCTION) \
45  V(JS_TO_WASM_FUNCTION) \
46  V(WASM_INTERPRETER_ENTRY) \
47  V(C_WASM_ENTRY)
48 
49  enum Kind {
50 #define DEFINE_CODE_KIND_ENUM(name) name,
51  CODE_KIND_LIST(DEFINE_CODE_KIND_ENUM)
52 #undef DEFINE_CODE_KIND_ENUM
53  NUMBER_OF_KINDS
54  };
55 
56  static const char* Kind2String(Kind kind);
57 
58 #ifdef ENABLE_DISASSEMBLER
59  const char* GetName(Isolate* isolate) const;
60  void Disassemble(const char* name, std::ostream& os,
61  Address current_pc = kNullAddress);
62 #endif
63 
64  // [instruction_size]: Size of the native instructions, including embedded
65  // data such as the safepoints table.
66  inline int raw_instruction_size() const;
67  inline void set_raw_instruction_size(int value);
68 
69  // Returns the size of the native instructions, including embedded
70  // data such as the safepoints table. For off-heap code objects
71  // this may from instruction_size in that this will return the size of the
72  // off-heap instruction stream rather than the on-heap trampoline located
73  // at instruction_start.
74  inline int InstructionSize() const;
75  int OffHeapInstructionSize() const;
76 
77  // [relocation_info]: Code relocation information
78  DECL_ACCESSORS2(relocation_info, ByteArray)
79 
80  // This function should be called only from GC.
81  void ClearEmbeddedObjects(Heap* heap);
82 
83  // [deoptimization_data]: Array containing data for deopt.
84  DECL_ACCESSORS2(deoptimization_data, FixedArray)
85 
86  // [source_position_table]: ByteArray for the source positions table or
87  // SourcePositionTableWithFrameCache.
88  DECL_ACCESSORS(source_position_table, Object)
89  inline ByteArray SourcePositionTable() const;
90 
91  // [code_data_container]: A container indirection for all mutable fields.
92  DECL_ACCESSORS(code_data_container, CodeDataContainer)
93 
94  // [stub_key]: The major/minor key of a code stub.
95  inline uint32_t stub_key() const;
96  inline void set_stub_key(uint32_t key);
97 
98  // [next_code_link]: Link for lists of optimized or deoptimized code.
99  // Note that this field is stored in the {CodeDataContainer} to be mutable.
100  inline Object* next_code_link() const;
101  inline void set_next_code_link(Object* value);
102 
103  // [constant_pool offset]: Offset of the constant pool.
104  // Valid for FLAG_enable_embedded_constant_pool only
105  inline int constant_pool_offset() const;
106  inline void set_constant_pool_offset(int offset);
107 
108  // Unchecked accessors to be used during GC.
109  inline ByteArray unchecked_relocation_info() const;
110 
111  inline int relocation_size() const;
112 
113  // [kind]: Access to specific code kind.
114  inline Kind kind() const;
115 
116  inline bool is_stub() const;
117  inline bool is_optimized_code() const;
118  inline bool is_wasm_code() const;
119 
120  // Testers for interpreter builtins.
121  inline bool is_interpreter_trampoline_builtin() const;
122 
123  // Tells whether the code checks the optimization marker in the function's
124  // feedback vector.
125  inline bool checks_optimization_marker() const;
126 
127  // Tells whether the outgoing parameters of this code are tagged pointers.
128  inline bool has_tagged_params() const;
129 
130  // [is_turbofanned]: For kind STUB or OPTIMIZED_FUNCTION, tells whether the
131  // code object was generated by the TurboFan optimizing compiler.
132  inline bool is_turbofanned() const;
133 
134  // [can_have_weak_objects]: For kind OPTIMIZED_FUNCTION, tells whether the
135  // embedded objects in code should be treated weakly.
136  inline bool can_have_weak_objects() const;
137  inline void set_can_have_weak_objects(bool value);
138 
139  // [is_construct_stub]: For kind BUILTIN, tells whether the code object
140  // represents a hand-written construct stub
141  // (e.g., NumberConstructor_ConstructStub).
142  inline bool is_construct_stub() const;
143  inline void set_is_construct_stub(bool value);
144 
145  // [builtin_index]: For builtins, tells which builtin index the code object
146  // has. The builtin index is a non-negative integer for builtins, and -1
147  // otherwise.
148  inline int builtin_index() const;
149  inline void set_builtin_index(int id);
150  inline bool is_builtin() const;
151 
152  inline bool has_safepoint_info() const;
153 
154  // [stack_slots]: If {has_safepoint_info()}, the number of stack slots
155  // reserved in the code prologue.
156  inline int stack_slots() const;
157 
158  // [safepoint_table_offset]: If {has_safepoint_info()}, the offset in the
159  // instruction stream where the safepoint table starts.
160  inline int safepoint_table_offset() const;
161  inline void set_safepoint_table_offset(int offset);
162 
163  // [handler_table_offset]: The offset in the instruction stream where the
164  // exception handler table starts.
165  inline int handler_table_offset() const;
166  inline void set_handler_table_offset(int offset);
167 
168  // [marked_for_deoptimization]: For kind OPTIMIZED_FUNCTION tells whether
169  // the code is going to be deoptimized.
170  inline bool marked_for_deoptimization() const;
171  inline void set_marked_for_deoptimization(bool flag);
172 
173  // [embedded_objects_cleared]: For kind OPTIMIZED_FUNCTION tells whether
174  // the embedded objects in the code marked for deoptimization were cleared.
175  // Note that embedded_objects_cleared() implies marked_for_deoptimization().
176  inline bool embedded_objects_cleared() const;
177  inline void set_embedded_objects_cleared(bool flag);
178 
179  // [deopt_already_counted]: For kind OPTIMIZED_FUNCTION tells whether
180  // the code was already deoptimized.
181  inline bool deopt_already_counted() const;
182  inline void set_deopt_already_counted(bool flag);
183 
184  // [is_promise_rejection]: For kind BUILTIN tells whether the
185  // exception thrown by the code will lead to promise rejection or
186  // uncaught if both this and is_exception_caught is set.
187  // Use GetBuiltinCatchPrediction to access this.
188  inline void set_is_promise_rejection(bool flag);
189 
190  // [is_exception_caught]: For kind BUILTIN tells whether the
191  // exception thrown by the code will be caught internally or
192  // uncaught if both this and is_promise_rejection is set.
193  // Use GetBuiltinCatchPrediction to access this.
194  inline void set_is_exception_caught(bool flag);
195 
196  // [is_off_heap_trampoline]: For kind BUILTIN tells whether
197  // this is a trampoline to an off-heap builtin.
198  inline bool is_off_heap_trampoline() const;
199 
200  // [constant_pool]: The constant pool for this function.
201  inline Address constant_pool() const;
202 
203  // Get the safepoint entry for the given pc.
204  SafepointEntry GetSafepointEntry(Address pc);
205 
206  // The entire code object including its header is copied verbatim to the
207  // snapshot so that it can be written in one, fast, memcpy during
208  // deserialization. The deserializer will overwrite some pointers, rather
209  // like a runtime linker, but the random allocation addresses used in the
210  // mksnapshot process would still be present in the unlinked snapshot data,
211  // which would make snapshot production non-reproducible. This method wipes
212  // out the to-be-overwritten header data for reproducible snapshots.
213  inline void WipeOutHeader();
214 
215  // Clear uninitialized padding space. This ensures that the snapshot content
216  // is deterministic.
217  inline void clear_padding();
218  // Initialize the flags field. Similar to clear_padding above this ensure that
219  // the snapshot content is deterministic.
220  inline void initialize_flags(Kind kind, bool has_unwinding_info,
221  bool is_turbofanned, int stack_slots,
222  bool is_off_heap_trampoline);
223 
224  // Convert a target address into a code object.
225  static inline Code GetCodeFromTargetAddress(Address address);
226 
227  // Convert an entry address into an object.
228  static inline Object* GetObjectFromEntryAddress(Address location_of_address);
229 
230  // Convert a code entry into an object.
231  static inline Object* GetObjectFromCodeEntry(Address code_entry);
232 
233  // Returns the address of the first instruction.
234  inline Address raw_instruction_start() const;
235 
236  // Returns the address of the first instruction. For off-heap code objects
237  // this differs from instruction_start (which would point to the off-heap
238  // trampoline instead).
239  inline Address InstructionStart() const;
240  Address OffHeapInstructionStart() const;
241 
242  // Returns the address right after the last instruction.
243  inline Address raw_instruction_end() const;
244 
245  // Returns the address right after the last instruction. For off-heap code
246  // objects this differs from instruction_end (which would point to the
247  // off-heap trampoline instead).
248  inline Address InstructionEnd() const;
249  Address OffHeapInstructionEnd() const;
250 
251  // Returns the size of the instructions, padding, relocation and unwinding
252  // information.
253  inline int body_size() const;
254 
255  // Returns the size of code and its metadata. This includes the size of code
256  // relocation information, deoptimization data and handler table.
257  inline int SizeIncludingMetadata() const;
258 
259  // Returns the address of the first relocation info (read backwards!).
260  inline byte* relocation_start() const;
261 
262  // Returns the address right after the relocation info (read backwards!).
263  inline byte* relocation_end() const;
264 
265  // [has_unwinding_info]: Whether this code object has unwinding information.
266  // If it doesn't, unwinding_information_start() will point to invalid data.
267  //
268  // The body of all code objects has the following layout.
269  //
270  // +--------------------------+ <-- raw_instruction_start()
271  // | instructions |
272  // | ... |
273  // +--------------------------+
274  // | relocation info |
275  // | ... |
276  // +--------------------------+ <-- raw_instruction_end()
277  //
278  // If has_unwinding_info() is false, raw_instruction_end() points to the first
279  // memory location after the end of the code object. Otherwise, the body
280  // continues as follows:
281  //
282  // +--------------------------+
283  // | padding to the next |
284  // | 8-byte aligned address |
285  // +--------------------------+ <-- raw_instruction_end()
286  // | [unwinding_info_size] |
287  // | as uint64_t |
288  // +--------------------------+ <-- unwinding_info_start()
289  // | unwinding info |
290  // | ... |
291  // +--------------------------+ <-- unwinding_info_end()
292  //
293  // and unwinding_info_end() points to the first memory location after the end
294  // of the code object.
295  //
296  inline bool has_unwinding_info() const;
297 
298  // [unwinding_info_size]: Size of the unwinding information.
299  inline int unwinding_info_size() const;
300  inline void set_unwinding_info_size(int value);
301 
302  // Returns the address of the unwinding information, if any.
303  inline Address unwinding_info_start() const;
304 
305  // Returns the address right after the end of the unwinding information.
306  inline Address unwinding_info_end() const;
307 
308  // Code entry point.
309  inline Address entry() const;
310 
311  // Returns true if pc is inside this object's instructions.
312  inline bool contains(Address pc);
313 
314  // Relocate the code by delta bytes. Called to signal that this code
315  // object has been moved by delta bytes.
316  void Relocate(intptr_t delta);
317 
318  // Migrate code from desc without flushing the instruction cache.
319  void CopyFromNoFlush(Heap* heap, const CodeDesc& desc);
320 
321  // Copy the RelocInfo portion of |desc| to |dest|. The ByteArray must be
322  // exactly the same size as the RelocInfo in |desc|.
323  static inline void CopyRelocInfoToByteArray(ByteArray dest,
324  const CodeDesc& desc);
325 
326  // Flushes the instruction cache for the executable instructions of this code
327  // object. Make sure to call this while the code is still writable.
328  void FlushICache() const;
329 
330  // Returns the object size for a given body (used for allocation).
331  static int SizeFor(int body_size) {
332  DCHECK_SIZE_TAG_ALIGNED(body_size);
333  return RoundUp(kHeaderSize + body_size, kCodeAlignment);
334  }
335 
336  // Calculate the size of the code object to report for log events. This takes
337  // the layout of the code object into account.
338  inline int ExecutableSize() const;
339 
340  DECL_CAST2(Code)
341 
342  // Dispatched behavior.
343  inline int CodeSize() const;
344 
345  DECL_PRINTER(Code)
346  DECL_VERIFIER(Code)
347 
348  void PrintDeoptLocation(FILE* out, const char* str, Address pc);
349  bool CanDeoptAt(Address pc);
350 
351  void SetMarkedForDeoptimization(const char* reason);
352 
353  inline HandlerTable::CatchPrediction GetBuiltinCatchPrediction();
354 
355 #ifdef DEBUG
356  enum VerifyMode { kNoContextSpecificPointers, kNoContextRetainingPointers };
357  void VerifyEmbeddedObjects(Isolate* isolate,
358  VerifyMode mode = kNoContextRetainingPointers);
359 #endif // DEBUG
360 
361  bool IsIsolateIndependent(Isolate* isolate);
362 
363  inline bool CanContainWeakObjects();
364 
365  inline bool IsWeakObject(HeapObject* object);
366 
367  static inline bool IsWeakObjectInOptimizedCode(HeapObject* object);
368 
369  // Return true if the function is inlined in the code.
370  bool Inlines(SharedFunctionInfo* sfi);
371 
372  class OptimizedCodeIterator;
373 
374  // Layout description.
375 #define CODE_FIELDS(V) \
376  V(kRelocationInfoOffset, kTaggedSize) \
377  V(kDeoptimizationDataOffset, kTaggedSize) \
378  V(kSourcePositionTableOffset, kTaggedSize) \
379  V(kCodeDataContainerOffset, kTaggedSize) \
380  /* Data or code not directly visited by GC directly starts here. */ \
381  /* The serializer needs to copy bytes starting from here verbatim. */ \
382  /* Objects embedded into code is visited via reloc info. */ \
383  V(kDataStart, 0) \
384  V(kInstructionSizeOffset, kIntSize) \
385  V(kFlagsOffset, kIntSize) \
386  V(kSafepointTableOffsetOffset, kIntSize) \
387  V(kHandlerTableOffsetOffset, kIntSize) \
388  V(kStubKeyOffset, kIntSize) \
389  V(kConstantPoolOffset, FLAG_enable_embedded_constant_pool ? kIntSize : 0) \
390  V(kBuiltinIndexOffset, kIntSize) \
391  /* Add padding to align the instruction start following right after */ \
392  /* the Code object header. */ \
393  V(kHeaderPaddingStart, CODE_POINTER_PADDING(kHeaderPaddingStart)) \
394  V(kHeaderSize, 0)
395 
396  DEFINE_FIELD_OFFSET_CONSTANTS(HeapObject::kHeaderSize, CODE_FIELDS)
397 #undef CODE_FIELDS
398 
399  inline int GetUnwindingInfoSizeOffset() const;
400 
401  class BodyDescriptor;
402 
403  // Flags layout. BitField<type, shift, size>.
404 #define CODE_FLAGS_BIT_FIELDS(V, _) \
405  V(HasUnwindingInfoField, bool, 1, _) \
406  V(KindField, Kind, 5, _) \
407  V(IsTurbofannedField, bool, 1, _) \
408  V(StackSlotsField, int, 24, _) \
409  V(IsOffHeapTrampoline, bool, 1, _)
410  DEFINE_BIT_FIELDS(CODE_FLAGS_BIT_FIELDS)
411 #undef CODE_FLAGS_BIT_FIELDS
412  static_assert(NUMBER_OF_KINDS <= KindField::kMax, "Code::KindField size");
413  static_assert(IsOffHeapTrampoline::kNext <= 32,
414  "Code::flags field exhausted");
415 
416  // KindSpecificFlags layout (STUB, BUILTIN and OPTIMIZED_FUNCTION)
417 #define CODE_KIND_SPECIFIC_FLAGS_BIT_FIELDS(V, _) \
418  V(MarkedForDeoptimizationField, bool, 1, _) \
419  V(EmbeddedObjectsClearedField, bool, 1, _) \
420  V(DeoptAlreadyCountedField, bool, 1, _) \
421  V(CanHaveWeakObjectsField, bool, 1, _) \
422  V(IsConstructStubField, bool, 1, _) \
423  V(IsPromiseRejectionField, bool, 1, _) \
424  V(IsExceptionCaughtField, bool, 1, _)
425  DEFINE_BIT_FIELDS(CODE_KIND_SPECIFIC_FLAGS_BIT_FIELDS)
426 #undef CODE_KIND_SPECIFIC_FLAGS_BIT_FIELDS
427  static_assert(IsExceptionCaughtField::kNext <= 32, "KindSpecificFlags full");
428 
429  // The {marked_for_deoptimization} field is accessed from generated code.
430  static const int kMarkedForDeoptimizationBit =
431  MarkedForDeoptimizationField::kShift;
432 
433  static const int kArgumentsBits = 16;
434  // Reserve one argument count value as the "don't adapt arguments" sentinel.
435  static const int kMaxArguments = (1 << kArgumentsBits) - 2;
436 
437  private:
438  friend class RelocIterator;
439 
440  bool is_promise_rejection() const;
441  bool is_exception_caught() const;
442 
443  OBJECT_CONSTRUCTORS(Code, HeapObjectPtr);
444 };
445 
447  public:
448  explicit OptimizedCodeIterator(Isolate* isolate);
449  Code Next();
450 
451  private:
452  Context next_context_;
453  Code current_code_;
454  Isolate* isolate_;
455 
456  DISALLOW_HEAP_ALLOCATION(no_gc);
457  DISALLOW_COPY_AND_ASSIGN(OptimizedCodeIterator)
458 };
459 
460 // CodeDataContainer is a container for all mutable fields associated with its
461 // referencing {Code} object. Since {Code} objects reside on write-protected
462 // pages within the heap, its header fields need to be immutable. There always
463 // is a 1-to-1 relation between {Code} and {CodeDataContainer}, the referencing
464 // field {Code::code_data_container} itself is immutable.
466  public:
467  DECL_ACCESSORS(next_code_link, Object)
468  DECL_INT_ACCESSORS(kind_specific_flags)
469 
470  // Clear uninitialized padding space. This ensures that the snapshot content
471  // is deterministic.
472  inline void clear_padding();
473 
474  DECL_CAST(CodeDataContainer)
475 
476  // Dispatched behavior.
477  DECL_PRINTER(CodeDataContainer)
478  DECL_VERIFIER(CodeDataContainer)
479 
480 // Layout description.
481 #define CODE_DATA_FIELDS(V) \
482  /* Weak pointer fields. */ \
483  V(kPointerFieldsStrongEndOffset, 0) \
484  V(kNextCodeLinkOffset, kTaggedSize) \
485  V(kPointerFieldsWeakEndOffset, 0) \
486  /* Raw data fields. */ \
487  V(kKindSpecificFlagsOffset, kIntSize) \
488  V(kUnalignedSize, OBJECT_POINTER_PADDING(kUnalignedSize)) \
489  /* Total size. */ \
490  V(kSize, 0)
491 
492  DEFINE_FIELD_OFFSET_CONSTANTS(HeapObject::kHeaderSize, CODE_DATA_FIELDS)
493 #undef CODE_DATA_FIELDS
494 
495  class BodyDescriptor;
496 
497  private:
498  DISALLOW_IMPLICIT_CONSTRUCTORS(CodeDataContainer);
499 };
500 
501 class AbstractCode : public HeapObjectPtr {
502  public:
503  NEVER_READ_ONLY_SPACE
504  // All code kinds and INTERPRETED_FUNCTION.
505  enum Kind {
506 #define DEFINE_CODE_KIND_ENUM(name) name,
507  CODE_KIND_LIST(DEFINE_CODE_KIND_ENUM)
508 #undef DEFINE_CODE_KIND_ENUM
509  INTERPRETED_FUNCTION,
510  NUMBER_OF_KINDS
511  };
512 
513  static const char* Kind2String(Kind kind);
514 
515  int SourcePosition(int offset);
516  int SourceStatementPosition(int offset);
517 
518  // Returns the address of the first instruction.
519  inline Address raw_instruction_start();
520 
521  // Returns the address of the first instruction. For off-heap code objects
522  // this differs from instruction_start (which would point to the off-heap
523  // trampoline instead).
524  inline Address InstructionStart();
525 
526  // Returns the address right after the last instruction.
527  inline Address raw_instruction_end();
528 
529  // Returns the address right after the last instruction. For off-heap code
530  // objects this differs from instruction_end (which would point to the
531  // off-heap trampoline instead).
532  inline Address InstructionEnd();
533 
534  // Returns the size of the code instructions.
535  inline int raw_instruction_size();
536 
537  // Returns the size of the native instructions, including embedded
538  // data such as the safepoints table. For off-heap code objects
539  // this may from instruction_size in that this will return the size of the
540  // off-heap instruction stream rather than the on-heap trampoline located
541  // at instruction_start.
542  inline int InstructionSize();
543 
544  // Return the source position table.
545  inline ByteArray source_position_table();
546 
547  inline Object* stack_frame_cache();
548  static void SetStackFrameCache(Handle<AbstractCode> abstract_code,
550  void DropStackFrameCache();
551 
552  // Returns the size of instructions and the metadata.
553  inline int SizeIncludingMetadata();
554 
555  // Returns true if pc is inside this object's instructions.
556  inline bool contains(Address pc);
557 
558  // Returns the AbstractCode::Kind of the code.
559  inline Kind kind();
560 
561  // Calculate the size of the code object to report for log events. This takes
562  // the layout of the code object into account.
563  inline int ExecutableSize();
564 
565  DECL_CAST2(AbstractCode)
566  inline Code GetCode();
567  inline BytecodeArray GetBytecodeArray();
568 
569  // Max loop nesting marker used to postpose OSR. We don't take loop
570  // nesting that is deeper than 5 levels into account.
571  static const int kMaxLoopNestingMarker = 6;
572 
573  OBJECT_CONSTRUCTORS(AbstractCode, HeapObjectPtr)
574 };
575 
576 // Dependent code is a singly linked list of weak fixed arrays. Each array
577 // contains weak pointers to code objects for one dependent group. The suffix of
578 // the array can be filled with the undefined value if the number of codes is
579 // less than the length of the array.
580 //
581 // +------+-----------------+--------+--------+-----+--------+-----------+-----+
582 // | next | count & group 1 | code 1 | code 2 | ... | code n | undefined | ... |
583 // +------+-----------------+--------+--------+-----+--------+-----------+-----+
584 // |
585 // V
586 // +------+-----------------+--------+--------+-----+--------+-----------+-----+
587 // | next | count & group 2 | code 1 | code 2 | ... | code m | undefined | ... |
588 // +------+-----------------+--------+--------+-----+--------+-----------+-----+
589 // |
590 // V
591 // empty_weak_fixed_array()
592 //
593 // The list of weak fixed arrays is ordered by dependency groups.
594 
596  public:
597  DECL_CAST(DependentCode)
598 
599  enum DependencyGroup {
600  // Group of code that embed a transition to this map, and depend on being
601  // deoptimized when the transition is replaced by a new version.
602  kTransitionGroup,
603  // Group of code that omit run-time prototype checks for prototypes
604  // described by this map. The group is deoptimized whenever an object
605  // described by this map changes shape (and transitions to a new map),
606  // possibly invalidating the assumptions embedded in the code.
607  kPrototypeCheckGroup,
608  // Group of code that depends on global property values in property cells
609  // not being changed.
610  kPropertyCellChangedGroup,
611  // Group of code that omit run-time checks for field(s) introduced by
612  // this map, i.e. for the field type.
613  kFieldOwnerGroup,
614  // Group of code that omit run-time type checks for initial maps of
615  // constructors.
616  kInitialMapChangedGroup,
617  // Group of code that depends on tenuring information in AllocationSites
618  // not being changed.
619  kAllocationSiteTenuringChangedGroup,
620  // Group of code that depends on element transition information in
621  // AllocationSites not being changed.
622  kAllocationSiteTransitionChangedGroup
623  };
624 
625  // Register a code dependency of {cell} on {object}.
626  static void InstallDependency(Isolate* isolate, const MaybeObjectHandle& code,
627  Handle<HeapObject> object,
628  DependencyGroup group);
629 
630  void DeoptimizeDependentCodeGroup(Isolate* isolate, DependencyGroup group);
631 
632  bool MarkCodeForDeoptimization(Isolate* isolate, DependencyGroup group);
633 
634  // The following low-level accessors are exposed only for tests.
635  inline DependencyGroup group();
636  inline MaybeObject object_at(int i);
637  inline int count();
638  inline DependentCode* next_link();
639 
640  private:
641  static const char* DependencyGroupName(DependencyGroup group);
642 
643  // Get/Set {object}'s {DependentCode}.
644  static DependentCode* GetDependentCode(Handle<HeapObject> object);
645  static void SetDependentCode(Handle<HeapObject> object,
647 
648  static Handle<DependentCode> New(Isolate* isolate, DependencyGroup group,
649  const MaybeObjectHandle& object,
650  Handle<DependentCode> next);
651  static Handle<DependentCode> EnsureSpace(Isolate* isolate,
652  Handle<DependentCode> entries);
653  static Handle<DependentCode> InsertWeakCode(Isolate* isolate,
654  Handle<DependentCode> entries,
655  DependencyGroup group,
656  const MaybeObjectHandle& code);
657 
658  // Compact by removing cleared weak cells and return true if there was
659  // any cleared weak cell.
660  bool Compact();
661 
662  static int Grow(int number_of_entries) {
663  if (number_of_entries < 5) return number_of_entries + 1;
664  return number_of_entries * 5 / 4;
665  }
666 
667  static const int kGroupCount = kAllocationSiteTransitionChangedGroup + 1;
668  static const int kNextLinkIndex = 0;
669  static const int kFlagsIndex = 1;
670  static const int kCodesStartIndex = 2;
671 
672  inline void set_next_link(DependentCode* next);
673  inline void set_count(int value);
674  inline void set_object_at(int i, MaybeObject object);
675  inline void clear_at(int i);
676  inline void copy(int from, int to);
677 
678  inline int flags();
679  inline void set_flags(int flags);
680  class GroupField : public BitField<int, 0, 3> {};
681  class CountField : public BitField<int, 3, 27> {};
682  STATIC_ASSERT(kGroupCount <= GroupField::kMax + 1);
683 };
684 
685 // BytecodeArray represents a sequence of interpreter bytecodes.
687  public:
688  enum Age {
689  kNoAgeBytecodeAge = 0,
690  kQuadragenarianBytecodeAge,
691  kQuinquagenarianBytecodeAge,
692  kSexagenarianBytecodeAge,
693  kSeptuagenarianBytecodeAge,
694  kOctogenarianBytecodeAge,
695  kAfterLastBytecodeAge,
696  kFirstBytecodeAge = kNoAgeBytecodeAge,
697  kLastBytecodeAge = kAfterLastBytecodeAge - 1,
698  kBytecodeAgeCount = kAfterLastBytecodeAge - kFirstBytecodeAge - 1,
699  kIsOldBytecodeAge = kSexagenarianBytecodeAge
700  };
701 
702  static int SizeFor(int length) {
703  return OBJECT_POINTER_ALIGN(kHeaderSize + length);
704  }
705 
706  // Setter and getter
707  inline byte get(int index);
708  inline void set(int index, byte value);
709 
710  // Returns data start address.
711  inline Address GetFirstBytecodeAddress();
712 
713  // Accessors for frame size.
714  inline int frame_size() const;
715  inline void set_frame_size(int frame_size);
716 
717  // Accessor for register count (derived from frame_size).
718  inline int register_count() const;
719 
720  // Accessors for parameter count (including implicit 'this' receiver).
721  inline int parameter_count() const;
722  inline void set_parameter_count(int number_of_parameters);
723 
724  // Register used to pass the incoming new.target or generator object from the
725  // fucntion call.
726  inline interpreter::Register incoming_new_target_or_generator_register()
727  const;
728  inline void set_incoming_new_target_or_generator_register(
729  interpreter::Register incoming_new_target_or_generator_register);
730 
731  // Accessors for profiling count.
732  inline int interrupt_budget() const;
733  inline void set_interrupt_budget(int interrupt_budget);
734 
735  // Accessors for OSR loop nesting level.
736  inline int osr_loop_nesting_level() const;
737  inline void set_osr_loop_nesting_level(int depth);
738 
739  // Accessors for bytecode's code age.
740  inline Age bytecode_age() const;
741  inline void set_bytecode_age(Age age);
742 
743  // Accessors for the constant pool.
744  DECL_ACCESSORS2(constant_pool, FixedArray)
745 
746  // Accessors for handler table containing offsets of exception handlers.
747  DECL_ACCESSORS2(handler_table, ByteArray)
748 
749  // Accessors for source position table containing mappings between byte code
750  // offset and source position or SourcePositionTableWithFrameCache.
751  DECL_ACCESSORS(source_position_table, Object)
752 
754  inline void ClearFrameCacheFromSourcePositionTable();
755 
756  DECL_CAST2(BytecodeArray)
757 
758  // Dispatched behavior.
759  inline int BytecodeArraySize();
760 
761  inline int raw_instruction_size();
762 
763  // Returns the size of bytecode and its metadata. This includes the size of
764  // bytecode, constant pool, source position table, and handler table.
765  inline int SizeIncludingMetadata();
766 
767  int SourcePosition(int offset);
768  int SourceStatementPosition(int offset);
769 
770  DECL_PRINTER(BytecodeArray)
771  DECL_VERIFIER(BytecodeArray)
772 
773  void Disassemble(std::ostream& os);
774 
775  void CopyBytecodesTo(BytecodeArray to);
776 
777  // Bytecode aging
778  bool IsOld() const;
779  void MakeOlder();
780 
781  // Clear uninitialized padding space. This ensures that the snapshot content
782  // is deterministic.
783  inline void clear_padding();
784 
785 // Layout description.
786 #define BYTECODE_ARRAY_FIELDS(V) \
787  /* Pointer fields. */ \
788  V(kConstantPoolOffset, kTaggedSize) \
789  V(kHandlerTableOffset, kTaggedSize) \
790  V(kSourcePositionTableOffset, kTaggedSize) \
791  V(kFrameSizeOffset, kIntSize) \
792  V(kParameterSizeOffset, kIntSize) \
793  V(kIncomingNewTargetOrGeneratorRegisterOffset, kIntSize) \
794  V(kInterruptBudgetOffset, kIntSize) \
795  V(kOSRNestingLevelOffset, kCharSize) \
796  V(kBytecodeAgeOffset, kCharSize) \
797  /* Total size. */ \
798  V(kHeaderSize, 0)
799 
800  DEFINE_FIELD_OFFSET_CONSTANTS(FixedArrayBase::kHeaderSize,
801  BYTECODE_ARRAY_FIELDS)
802 #undef BYTECODE_ARRAY_FIELDS
803 
804  // Maximal memory consumption for a single BytecodeArray.
805  static const int kMaxSize = 512 * MB;
806  // Maximal length of a single BytecodeArray.
807  static const int kMaxLength = kMaxSize - kHeaderSize;
808 
809  class BodyDescriptor;
810 
811  OBJECT_CONSTRUCTORS(BytecodeArray, FixedArrayBase);
812 };
813 
814 // DeoptimizationData is a fixed array used to hold the deoptimization data for
815 // optimized code. It also contains information about functions that were
816 // inlined. If N different functions were inlined then the first N elements of
817 // the literal array will contain these functions.
818 //
819 // It can be empty.
821  public:
822  // Layout description. Indices in the array.
823  static const int kTranslationByteArrayIndex = 0;
824  static const int kInlinedFunctionCountIndex = 1;
825  static const int kLiteralArrayIndex = 2;
826  static const int kOsrBytecodeOffsetIndex = 3;
827  static const int kOsrPcOffsetIndex = 4;
828  static const int kOptimizationIdIndex = 5;
829  static const int kSharedFunctionInfoIndex = 6;
830  static const int kInliningPositionsIndex = 7;
831  static const int kFirstDeoptEntryIndex = 8;
832 
833  // Offsets of deopt entry elements relative to the start of the entry.
834  static const int kBytecodeOffsetRawOffset = 0;
835  static const int kTranslationIndexOffset = 1;
836  static const int kPcOffset = 2;
837  static const int kDeoptEntrySize = 3;
838 
839 // Simple element accessors.
840 #define DECL_ELEMENT_ACCESSORS(name, type) \
841  inline type name() const; \
842  inline void Set##name(type value);
843 
844  DECL_ELEMENT_ACCESSORS(TranslationByteArray, ByteArray)
845  DECL_ELEMENT_ACCESSORS(InlinedFunctionCount, Smi)
846  DECL_ELEMENT_ACCESSORS(LiteralArray, FixedArray)
847  DECL_ELEMENT_ACCESSORS(OsrBytecodeOffset, Smi)
848  DECL_ELEMENT_ACCESSORS(OsrPcOffset, Smi)
849  DECL_ELEMENT_ACCESSORS(OptimizationId, Smi)
850  DECL_ELEMENT_ACCESSORS(SharedFunctionInfo, Object*)
851  DECL_ELEMENT_ACCESSORS(InliningPositions, PodArray<InliningPosition>)
852 
853 #undef DECL_ELEMENT_ACCESSORS
854 
855 // Accessors for elements of the ith deoptimization entry.
856 #define DECL_ENTRY_ACCESSORS(name, type) \
857  inline type name(int i) const; \
858  inline void Set##name(int i, type value);
859 
860  DECL_ENTRY_ACCESSORS(BytecodeOffsetRaw, Smi)
861  DECL_ENTRY_ACCESSORS(TranslationIndex, Smi)
862  DECL_ENTRY_ACCESSORS(Pc, Smi)
863 
864 #undef DECL_ENTRY_ACCESSORS
865 
866  inline BailoutId BytecodeOffset(int i);
867 
868  inline void SetBytecodeOffset(int i, BailoutId value);
869 
870  inline int DeoptCount();
871 
872  static const int kNotInlinedIndex = -1;
873 
874  // Returns the inlined function at the given position in LiteralArray, or the
875  // outer function if index == kNotInlinedIndex.
876  class SharedFunctionInfo* GetInlinedFunction(int index);
877 
878  // Allocates a DeoptimizationData.
879  static Handle<DeoptimizationData> New(Isolate* isolate, int deopt_entry_count,
880  PretenureFlag pretenure);
881 
882  // Return an empty DeoptimizationData.
883  static Handle<DeoptimizationData> Empty(Isolate* isolate);
884 
885  DECL_CAST2(DeoptimizationData)
886 
887 #ifdef ENABLE_DISASSEMBLER
888  void DeoptimizationDataPrint(std::ostream& os); // NOLINT
889 #endif
890 
891  private:
892  static int IndexForEntry(int i) {
893  return kFirstDeoptEntryIndex + (i * kDeoptEntrySize);
894  }
895 
896  static int LengthFor(int entry_count) { return IndexForEntry(entry_count); }
897 
898  OBJECT_CONSTRUCTORS(DeoptimizationData, FixedArray)
899 };
900 
902  public:
903  DECL_ACCESSORS2(source_position_table, ByteArray)
904  DECL_ACCESSORS2(stack_frame_cache, SimpleNumberDictionary)
905 
907 
908 // Layout description.
909 #define SOURCE_POSITION_TABLE_WITH_FRAME_FIELDS(V) \
910  V(kSourcePositionTableIndex, kTaggedSize) \
911  V(kStackFrameCacheIndex, kTaggedSize) \
912  /* Total size. */ \
913  V(kSize, 0)
914 
915  DEFINE_FIELD_OFFSET_CONSTANTS(Struct::kHeaderSize,
916  SOURCE_POSITION_TABLE_WITH_FRAME_FIELDS)
917 #undef SOURCE_POSITION_TABLE_WITH_FRAME_FIELDS
918 
919  private:
920  DISALLOW_IMPLICIT_CONSTRUCTORS(SourcePositionTableWithFrameCache);
921 };
922 
923 } // namespace internal
924 } // namespace v8
925 
926 #include "src/objects/object-macros-undef.h"
927 
928 #endif // V8_OBJECTS_CODE_H_
// (doc-extraction cross-reference residue: "Definition: libplatform.h:13")