V8 API Reference, 7.2.502.16 (for Deno 0.2.4)
code-inl.h
// Copyright 2017 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef V8_OBJECTS_CODE_INL_H_
#define V8_OBJECTS_CODE_INL_H_

#include "src/objects/code.h"

#include "src/interpreter/bytecode-register.h"
#include "src/isolate.h"
#include "src/objects/dictionary.h"
#include "src/objects/instance-type-inl.h"
#include "src/objects/map-inl.h"
#include "src/objects/maybe-object-inl.h"
#include "src/objects/smi-inl.h"
#include "src/v8memory.h"

// Has to be the last include (doesn't have include guards):
#include "src/objects/object-macros.h"

namespace v8 {
namespace internal {

OBJECT_CONSTRUCTORS_IMPL(DeoptimizationData, FixedArray)
OBJECT_CONSTRUCTORS_IMPL(BytecodeArray, FixedArrayBase)
OBJECT_CONSTRUCTORS_IMPL(AbstractCode, HeapObjectPtr)

NEVER_READ_ONLY_SPACE_IMPL(AbstractCode)

CAST_ACCESSOR2(AbstractCode)
CAST_ACCESSOR2(BytecodeArray)
CAST_ACCESSOR2(Code)
CAST_ACCESSOR(CodeDataContainer)
CAST_ACCESSOR(DependentCode)
CAST_ACCESSOR2(DeoptimizationData)
CAST_ACCESSOR(SourcePositionTableWithFrameCache)

ACCESSORS2(SourcePositionTableWithFrameCache, source_position_table, ByteArray,
           kSourcePositionTableIndex)
ACCESSORS2(SourcePositionTableWithFrameCache, stack_frame_cache,
           SimpleNumberDictionary, kStackFrameCacheIndex)

int AbstractCode::raw_instruction_size() {
  if (IsCode()) {
    return GetCode()->raw_instruction_size();
  } else {
    return GetBytecodeArray()->length();
  }
}

int AbstractCode::InstructionSize() {
  if (IsCode()) {
    return GetCode()->InstructionSize();
  } else {
    return GetBytecodeArray()->length();
  }
}

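// Note: on both Code and BytecodeArray, the raw source_position_table slot
// holds either a plain ByteArray or a SourcePositionTableWithFrameCache that
// pairs the table with a SimpleNumberDictionary stack frame cache. The two
// accessors below unwrap whichever form is present.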
ByteArray AbstractCode::source_position_table() {
  if (IsCode()) {
    return GetCode()->SourcePositionTable();
  } else {
    return GetBytecodeArray()->SourcePositionTable();
  }
}

Object* AbstractCode::stack_frame_cache() {
  Object* maybe_table;
  if (IsCode()) {
    maybe_table = GetCode()->source_position_table();
  } else {
    maybe_table = GetBytecodeArray()->source_position_table();
  }
  if (maybe_table->IsSourcePositionTableWithFrameCache()) {
    return SourcePositionTableWithFrameCache::cast(maybe_table)
        ->stack_frame_cache();
  }
  return Smi::kZero;
}

int AbstractCode::SizeIncludingMetadata() {
  if (IsCode()) {
    return GetCode()->SizeIncludingMetadata();
  } else {
    return GetBytecodeArray()->SizeIncludingMetadata();
  }
}
int AbstractCode::ExecutableSize() {
  if (IsCode()) {
    return GetCode()->ExecutableSize();
  } else {
    return GetBytecodeArray()->BytecodeArraySize();
  }
}

Address AbstractCode::raw_instruction_start() {
  if (IsCode()) {
    return GetCode()->raw_instruction_start();
  } else {
    return GetBytecodeArray()->GetFirstBytecodeAddress();
  }
}

Address AbstractCode::InstructionStart() {
  if (IsCode()) {
    return GetCode()->InstructionStart();
  } else {
    return GetBytecodeArray()->GetFirstBytecodeAddress();
  }
}

Address AbstractCode::raw_instruction_end() {
  if (IsCode()) {
    return GetCode()->raw_instruction_end();
  } else {
    return GetBytecodeArray()->GetFirstBytecodeAddress() +
           GetBytecodeArray()->length();
  }
}

Address AbstractCode::InstructionEnd() {
  if (IsCode()) {
    return GetCode()->InstructionEnd();
  } else {
    return GetBytecodeArray()->GetFirstBytecodeAddress() +
           GetBytecodeArray()->length();
  }
}

bool AbstractCode::contains(Address inner_pointer) {
  return (address() <= inner_pointer) && (inner_pointer <= address() + Size());
}

AbstractCode::Kind AbstractCode::kind() {
  if (IsCode()) {
    return static_cast<AbstractCode::Kind>(GetCode()->kind());
  } else {
    return INTERPRETED_FUNCTION;
  }
}

Code AbstractCode::GetCode() { return Code::cast(*this); }

BytecodeArray AbstractCode::GetBytecodeArray() {
  return BytecodeArray::cast(*this);
}

DependentCode* DependentCode::next_link() {
  return DependentCode::cast(Get(kNextLinkIndex)->GetHeapObjectAssumeStrong());
}

void DependentCode::set_next_link(DependentCode* next) {
  Set(kNextLinkIndex, HeapObjectReference::Strong(next));
}

int DependentCode::flags() { return Smi::ToInt(Get(kFlagsIndex)->ToSmi()); }

void DependentCode::set_flags(int flags) {
  Set(kFlagsIndex, MaybeObject::FromObject(Smi::FromInt(flags)));
}

int DependentCode::count() { return CountField::decode(flags()); }

void DependentCode::set_count(int value) {
  set_flags(CountField::update(flags(), value));
}

DependentCode::DependencyGroup DependentCode::group() {
  return static_cast<DependencyGroup>(GroupField::decode(flags()));
}

void DependentCode::set_object_at(int i, MaybeObject object) {
  Set(kCodesStartIndex + i, object);
}

MaybeObject DependentCode::object_at(int i) {
  return Get(kCodesStartIndex + i);
}

void DependentCode::clear_at(int i) {
  Set(kCodesStartIndex + i,
      HeapObjectReference::Strong(GetReadOnlyRoots().undefined_value()));
}

void DependentCode::copy(int from, int to) {
  Set(kCodesStartIndex + to, Get(kCodesStartIndex + from));
}

OBJECT_CONSTRUCTORS_IMPL(Code, HeapObjectPtr)
NEVER_READ_ONLY_SPACE_IMPL(Code)

INT_ACCESSORS(Code, raw_instruction_size, kInstructionSizeOffset)
INT_ACCESSORS(Code, handler_table_offset, kHandlerTableOffsetOffset)
#define CODE_ACCESSORS(name, type, offset) \
  ACCESSORS_CHECKED2(Code, name, type, offset, true, !Heap::InNewSpace(value))
#define CODE_ACCESSORS2(name, type, offset) \
  ACCESSORS_CHECKED3(Code, name, type, offset, true, !Heap::InNewSpace(value))
// TODO(3770): Use shared SYNCHRONIZED_ACCESSORS_CHECKED2 when migrating
// CodeDataContainer*.
#define SYNCHRONIZED_CODE_ACCESSORS(name, type, offset)         \
  type* Code::name() const {                                    \
    type* value = type::cast(ACQUIRE_READ_FIELD(this, offset)); \
    return value;                                               \
  }                                                             \
  void Code::set_##name(type* value, WriteBarrierMode mode) {   \
    DCHECK(!Heap::InNewSpace(value));                           \
    RELEASE_WRITE_FIELD(this, offset, value);                   \
    CONDITIONAL_WRITE_BARRIER(this, offset, value, mode);       \
  }
CODE_ACCESSORS2(relocation_info, ByteArray, kRelocationInfoOffset)
CODE_ACCESSORS2(deoptimization_data, FixedArray, kDeoptimizationDataOffset)
CODE_ACCESSORS(source_position_table, Object, kSourcePositionTableOffset)
// Concurrent marker needs to access kind specific flags in code data container.
SYNCHRONIZED_CODE_ACCESSORS(code_data_container, CodeDataContainer,
                            kCodeDataContainerOffset)
#undef CODE_ACCESSORS
#undef CODE_ACCESSORS2
#undef SYNCHRONIZED_CODE_ACCESSORS
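// For reference: CODE_ACCESSORS(name, type, offset) expands (roughly, via
// ACCESSORS_CHECKED2 in object-macros.h) to a getter returning the field at
// `offset` cast to `type`, plus a set_##name setter that DCHECKs
// !Heap::InNewSpace(value), writes the field, and emits a conditional write
// barrier. The SYNCHRONIZED_ variant spelled out above does the same with
// acquire/release field accesses so the concurrent marker can safely read
// code_data_container.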

void Code::WipeOutHeader() {
  WRITE_FIELD(this, kRelocationInfoOffset, Smi::FromInt(0));
  WRITE_FIELD(this, kDeoptimizationDataOffset, Smi::FromInt(0));
  WRITE_FIELD(this, kSourcePositionTableOffset, Smi::FromInt(0));
  WRITE_FIELD(this, kCodeDataContainerOffset, Smi::FromInt(0));
}

void Code::clear_padding() {
  memset(reinterpret_cast<void*>(address() + kHeaderPaddingStart), 0,
         kHeaderSize - kHeaderPaddingStart);
  Address data_end =
      has_unwinding_info() ? unwinding_info_end() : raw_instruction_end();
  memset(reinterpret_cast<void*>(data_end), 0,
         CodeSize() - (data_end - address()));
}

ByteArray Code::SourcePositionTable() const {
  Object* maybe_table = source_position_table();
  if (maybe_table->IsByteArray()) return ByteArray::cast(maybe_table);
  DCHECK(maybe_table->IsSourcePositionTableWithFrameCache());
  return SourcePositionTableWithFrameCache::cast(maybe_table)
      ->source_position_table();
}

uint32_t Code::stub_key() const {
  DCHECK(is_stub());
  return READ_UINT32_FIELD(this, kStubKeyOffset);
}

void Code::set_stub_key(uint32_t key) {
  DCHECK(is_stub() || key == 0);  // Allow zero initialization.
  WRITE_UINT32_FIELD(this, kStubKeyOffset, key);
}

Object* Code::next_code_link() const {
  return code_data_container()->next_code_link();
}

void Code::set_next_code_link(Object* value) {
  code_data_container()->set_next_code_link(value);
}

int Code::InstructionSize() const {
  if (is_off_heap_trampoline()) {
    DCHECK(FLAG_embedded_builtins);
    return OffHeapInstructionSize();
  }
  return raw_instruction_size();
}

Address Code::raw_instruction_start() const {
  return FIELD_ADDR(this, kHeaderSize);
}

Address Code::InstructionStart() const {
  if (is_off_heap_trampoline()) {
    DCHECK(FLAG_embedded_builtins);
    return OffHeapInstructionStart();
  }
  return raw_instruction_start();
}

Address Code::raw_instruction_end() const {
  return raw_instruction_start() + raw_instruction_size();
}

Address Code::InstructionEnd() const {
  if (is_off_heap_trampoline()) {
    DCHECK(FLAG_embedded_builtins);
    return OffHeapInstructionEnd();
  }
  return raw_instruction_end();
}

int Code::GetUnwindingInfoSizeOffset() const {
  DCHECK(has_unwinding_info());
  return RoundUp(kHeaderSize + raw_instruction_size(), kInt64Size);
}

int Code::unwinding_info_size() const {
  DCHECK(has_unwinding_info());
  return static_cast<int>(
      READ_UINT64_FIELD(this, GetUnwindingInfoSizeOffset()));
}

void Code::set_unwinding_info_size(int value) {
  DCHECK(has_unwinding_info());
  WRITE_UINT64_FIELD(this, GetUnwindingInfoSizeOffset(), value);
}

Address Code::unwinding_info_start() const {
  DCHECK(has_unwinding_info());
  return FIELD_ADDR(this, GetUnwindingInfoSizeOffset()) + kInt64Size;
}

Address Code::unwinding_info_end() const {
  DCHECK(has_unwinding_info());
  return unwinding_info_start() + unwinding_info_size();
}

int Code::body_size() const {
  int unpadded_body_size =
      has_unwinding_info()
          ? static_cast<int>(unwinding_info_end() - raw_instruction_start())
          : raw_instruction_size();
  return RoundUp(unpadded_body_size, kObjectAlignment);
}
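// Layout implied by the accessors above (a sketch, not authoritative): the
// instruction area starts at kHeaderSize and is raw_instruction_size() bytes
// long; if has_unwinding_info(), its end is rounded up to kInt64Size and
// followed by a uint64 unwinding_info_size() and the unwinding info itself.
// body_size() rounds that whole area up to kObjectAlignment, and
// clear_padding() zeroes both the header padding and everything between the
// end of the data and CodeSize().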

int Code::SizeIncludingMetadata() const {
  int size = CodeSize();
  size += relocation_info()->Size();
  size += deoptimization_data()->Size();
  return size;
}

ByteArray Code::unchecked_relocation_info() const {
  return ByteArray::unchecked_cast(READ_FIELD(this, kRelocationInfoOffset));
}

byte* Code::relocation_start() const {
  return unchecked_relocation_info()->GetDataStartAddress();
}

byte* Code::relocation_end() const {
  return unchecked_relocation_info()->GetDataEndAddress();
}

int Code::relocation_size() const {
  return unchecked_relocation_info()->length();
}

Address Code::entry() const { return raw_instruction_start(); }

bool Code::contains(Address inner_pointer) {
  if (is_off_heap_trampoline()) {
    DCHECK(FLAG_embedded_builtins);
    if (OffHeapInstructionStart() <= inner_pointer &&
        inner_pointer < OffHeapInstructionEnd()) {
      return true;
    }
  }
  return (address() <= inner_pointer) && (inner_pointer < address() + Size());
}

int Code::ExecutableSize() const {
  // Check that the assumptions about the layout of the code object hold.
  DCHECK_EQ(static_cast<int>(raw_instruction_start() - address()),
            Code::kHeaderSize);
  return raw_instruction_size() + Code::kHeaderSize;
}

// static
void Code::CopyRelocInfoToByteArray(ByteArray dest, const CodeDesc& desc) {
  DCHECK_EQ(dest->length(), desc.reloc_size);
  CopyBytes(dest->GetDataStartAddress(),
            desc.buffer + desc.buffer_size - desc.reloc_size,
            static_cast<size_t>(desc.reloc_size));
}

int Code::CodeSize() const { return SizeFor(body_size()); }

Code::Kind Code::kind() const {
  return KindField::decode(READ_UINT32_FIELD(this, kFlagsOffset));
}

void Code::initialize_flags(Kind kind, bool has_unwinding_info,
                            bool is_turbofanned, int stack_slots,
                            bool is_off_heap_trampoline) {
  CHECK(0 <= stack_slots && stack_slots < StackSlotsField::kMax);
  static_assert(Code::NUMBER_OF_KINDS <= KindField::kMax + 1, "field overflow");
  uint32_t flags = HasUnwindingInfoField::encode(has_unwinding_info) |
                   KindField::encode(kind) |
                   IsTurbofannedField::encode(is_turbofanned) |
                   StackSlotsField::encode(stack_slots) |
                   IsOffHeapTrampoline::encode(is_off_heap_trampoline);
  WRITE_UINT32_FIELD(this, kFlagsOffset, flags);
  DCHECK_IMPLIES(stack_slots != 0, has_safepoint_info());
}

inline bool Code::is_interpreter_trampoline_builtin() const {
  bool is_interpreter_trampoline =
      (builtin_index() == Builtins::kInterpreterEntryTrampoline ||
       builtin_index() == Builtins::kInterpreterEnterBytecodeAdvance ||
       builtin_index() == Builtins::kInterpreterEnterBytecodeDispatch);
  return is_interpreter_trampoline;
}

inline bool Code::checks_optimization_marker() const {
  bool checks_marker =
      (builtin_index() == Builtins::kCompileLazy ||
       builtin_index() == Builtins::kInterpreterEntryTrampoline);
  return checks_marker ||
         (kind() == OPTIMIZED_FUNCTION && marked_for_deoptimization());
}

inline bool Code::has_tagged_params() const {
  return kind() != JS_TO_WASM_FUNCTION && kind() != C_WASM_ENTRY &&
         kind() != WASM_FUNCTION;
}

inline bool Code::has_unwinding_info() const {
  return HasUnwindingInfoField::decode(READ_UINT32_FIELD(this, kFlagsOffset));
}

inline bool Code::is_turbofanned() const {
  return IsTurbofannedField::decode(READ_UINT32_FIELD(this, kFlagsOffset));
}

inline bool Code::can_have_weak_objects() const {
  DCHECK(kind() == OPTIMIZED_FUNCTION);
  int32_t flags = code_data_container()->kind_specific_flags();
  return CanHaveWeakObjectsField::decode(flags);
}

inline void Code::set_can_have_weak_objects(bool value) {
  DCHECK(kind() == OPTIMIZED_FUNCTION);
  int32_t previous = code_data_container()->kind_specific_flags();
  int32_t updated = CanHaveWeakObjectsField::update(previous, value);
  code_data_container()->set_kind_specific_flags(updated);
}

inline bool Code::is_construct_stub() const {
  DCHECK(kind() == BUILTIN);
  int32_t flags = code_data_container()->kind_specific_flags();
  return IsConstructStubField::decode(flags);
}

inline void Code::set_is_construct_stub(bool value) {
  DCHECK(kind() == BUILTIN);
  int32_t previous = code_data_container()->kind_specific_flags();
  int32_t updated = IsConstructStubField::update(previous, value);
  code_data_container()->set_kind_specific_flags(updated);
}

inline bool Code::is_promise_rejection() const {
  DCHECK(kind() == BUILTIN);
  int32_t flags = code_data_container()->kind_specific_flags();
  return IsPromiseRejectionField::decode(flags);
}

inline void Code::set_is_promise_rejection(bool value) {
  DCHECK(kind() == BUILTIN);
  int32_t previous = code_data_container()->kind_specific_flags();
  int32_t updated = IsPromiseRejectionField::update(previous, value);
  code_data_container()->set_kind_specific_flags(updated);
}

inline bool Code::is_exception_caught() const {
  DCHECK(kind() == BUILTIN);
  int32_t flags = code_data_container()->kind_specific_flags();
  return IsExceptionCaughtField::decode(flags);
}

inline void Code::set_is_exception_caught(bool value) {
  DCHECK(kind() == BUILTIN);
  int32_t previous = code_data_container()->kind_specific_flags();
  int32_t updated = IsExceptionCaughtField::update(previous, value);
  code_data_container()->set_kind_specific_flags(updated);
}

inline bool Code::is_off_heap_trampoline() const {
  return IsOffHeapTrampoline::decode(READ_UINT32_FIELD(this, kFlagsOffset));
}

inline HandlerTable::CatchPrediction Code::GetBuiltinCatchPrediction() {
  if (is_promise_rejection()) return HandlerTable::PROMISE;
  if (is_exception_caught()) return HandlerTable::CAUGHT;
  return HandlerTable::UNCAUGHT;
}

int Code::builtin_index() const {
  int index = READ_INT_FIELD(this, kBuiltinIndexOffset);
  DCHECK(index == -1 || Builtins::IsBuiltinId(index));
  return index;
}

void Code::set_builtin_index(int index) {
  DCHECK(index == -1 || Builtins::IsBuiltinId(index));
  WRITE_INT_FIELD(this, kBuiltinIndexOffset, index);
}

bool Code::is_builtin() const { return builtin_index() != -1; }

bool Code::has_safepoint_info() const {
  return is_turbofanned() || is_wasm_code();
}

int Code::stack_slots() const {
  DCHECK(has_safepoint_info());
  return StackSlotsField::decode(READ_UINT32_FIELD(this, kFlagsOffset));
}

int Code::safepoint_table_offset() const {
  DCHECK(has_safepoint_info());
  return READ_INT32_FIELD(this, kSafepointTableOffsetOffset);
}

void Code::set_safepoint_table_offset(int offset) {
  CHECK_LE(0, offset);
  DCHECK(has_safepoint_info() || offset == 0);  // Allow zero initialization.
  DCHECK(IsAligned(offset, static_cast<unsigned>(kIntSize)));
  WRITE_INT32_FIELD(this, kSafepointTableOffsetOffset, offset);
}

bool Code::marked_for_deoptimization() const {
  DCHECK(kind() == OPTIMIZED_FUNCTION);
  int32_t flags = code_data_container()->kind_specific_flags();
  return MarkedForDeoptimizationField::decode(flags);
}

void Code::set_marked_for_deoptimization(bool flag) {
  DCHECK(kind() == OPTIMIZED_FUNCTION);
  DCHECK_IMPLIES(flag, AllowDeoptimization::IsAllowed(GetIsolate()));
  int32_t previous = code_data_container()->kind_specific_flags();
  int32_t updated = MarkedForDeoptimizationField::update(previous, flag);
  code_data_container()->set_kind_specific_flags(updated);
}

bool Code::embedded_objects_cleared() const {
  DCHECK(kind() == OPTIMIZED_FUNCTION);
  int32_t flags = code_data_container()->kind_specific_flags();
  return EmbeddedObjectsClearedField::decode(flags);
}

void Code::set_embedded_objects_cleared(bool flag) {
  DCHECK(kind() == OPTIMIZED_FUNCTION);
  DCHECK_IMPLIES(flag, marked_for_deoptimization());
  int32_t previous = code_data_container()->kind_specific_flags();
  int32_t updated = EmbeddedObjectsClearedField::update(previous, flag);
  code_data_container()->set_kind_specific_flags(updated);
}

bool Code::deopt_already_counted() const {
  DCHECK(kind() == OPTIMIZED_FUNCTION);
  int32_t flags = code_data_container()->kind_specific_flags();
  return DeoptAlreadyCountedField::decode(flags);
}

void Code::set_deopt_already_counted(bool flag) {
  DCHECK(kind() == OPTIMIZED_FUNCTION);
  DCHECK_IMPLIES(flag, AllowDeoptimization::IsAllowed(GetIsolate()));
  int32_t previous = code_data_container()->kind_specific_flags();
  int32_t updated = DeoptAlreadyCountedField::update(previous, flag);
  code_data_container()->set_kind_specific_flags(updated);
}

bool Code::is_stub() const { return kind() == STUB; }
bool Code::is_optimized_code() const { return kind() == OPTIMIZED_FUNCTION; }
bool Code::is_wasm_code() const { return kind() == WASM_FUNCTION; }

int Code::constant_pool_offset() const {
  if (!FLAG_enable_embedded_constant_pool) return InstructionSize();
  return READ_INT_FIELD(this, kConstantPoolOffset);
}

void Code::set_constant_pool_offset(int value) {
  if (!FLAG_enable_embedded_constant_pool) return;
  WRITE_INT_FIELD(this, kConstantPoolOffset, value);
}

Address Code::constant_pool() const {
  if (FLAG_enable_embedded_constant_pool) {
    int offset = constant_pool_offset();
    if (offset < InstructionSize()) {
      return InstructionStart() + offset;
    }
  }
  return kNullAddress;
}
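// When FLAG_enable_embedded_constant_pool is off, set_constant_pool_offset()
// is a no-op and constant_pool_offset() reports InstructionSize(), so
// constant_pool() always returns kNullAddress. With the flag on, the constant
// pool lives inside the instruction area at `offset` from InstructionStart().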

Code Code::GetCodeFromTargetAddress(Address address) {
  {
    // TODO(jgruber,v8:6666): Support embedded builtins here. We'd need to pass
    // in the current isolate.
    Address start = reinterpret_cast<Address>(Isolate::CurrentEmbeddedBlob());
    Address end = start + Isolate::CurrentEmbeddedBlobSize();
    CHECK(address < start || address >= end);
  }

  HeapObject* code = HeapObject::FromAddress(address - Code::kHeaderSize);
  // Unchecked cast because we can't rely on the map currently
  // not being a forwarding pointer.
  return Code::unchecked_cast(code);
}
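// The target address is the first instruction of the Code object, which sits
// Code::kHeaderSize bytes past the object's start (see raw_instruction_start()
// above), hence the subtraction. The CHECK rejects addresses inside the
// embedded builtins blob, which this arithmetic cannot handle (see the TODO).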

Object* Code::GetObjectFromCodeEntry(Address code_entry) {
  return HeapObject::FromAddress(code_entry - Code::kHeaderSize);
}

Object* Code::GetObjectFromEntryAddress(Address location_of_address) {
  return GetObjectFromCodeEntry(Memory<Address>(location_of_address));
}

bool Code::CanContainWeakObjects() {
  return is_optimized_code() && can_have_weak_objects();
}

bool Code::IsWeakObject(HeapObject* object) {
  return (CanContainWeakObjects() && IsWeakObjectInOptimizedCode(object));
}

bool Code::IsWeakObjectInOptimizedCode(HeapObject* object) {
  Map map = object->synchronized_map();
  InstanceType instance_type = map->instance_type();
  if (InstanceTypeChecker::IsMap(instance_type)) {
    return Map::cast(object)->CanTransition();
  }
  return InstanceTypeChecker::IsPropertyCell(instance_type) ||
         InstanceTypeChecker::IsJSReceiver(instance_type) ||
         InstanceTypeChecker::IsContext(instance_type);
}

// This field has to have relaxed atomic accessors because it is accessed in the
// concurrent marker.
RELAXED_INT32_ACCESSORS(CodeDataContainer, kind_specific_flags,
                        kKindSpecificFlagsOffset)
ACCESSORS(CodeDataContainer, next_code_link, Object, kNextCodeLinkOffset)

void CodeDataContainer::clear_padding() {
  memset(reinterpret_cast<void*>(address() + kUnalignedSize), 0,
         kSize - kUnalignedSize);
}

byte BytecodeArray::get(int index) {
  DCHECK(index >= 0 && index < this->length());
  return READ_BYTE_FIELD(this, kHeaderSize + index * kCharSize);
}

void BytecodeArray::set(int index, byte value) {
  DCHECK(index >= 0 && index < this->length());
  WRITE_BYTE_FIELD(this, kHeaderSize + index * kCharSize, value);
}
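// kCharSize is sizeof(char) == 1, so `index` maps directly to a byte offset
// immediately past the BytecodeArray header.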

void BytecodeArray::set_frame_size(int frame_size) {
  DCHECK_GE(frame_size, 0);
  DCHECK(IsAligned(frame_size, kSystemPointerSize));
  WRITE_INT_FIELD(this, kFrameSizeOffset, frame_size);
}

int BytecodeArray::frame_size() const {
  return READ_INT_FIELD(this, kFrameSizeOffset);
}

int BytecodeArray::register_count() const {
  return frame_size() / kSystemPointerSize;
}

void BytecodeArray::set_parameter_count(int number_of_parameters) {
  DCHECK_GE(number_of_parameters, 0);
  // Parameter count is stored as the size on stack of the parameters to allow
  // it to be used directly by generated code.
  WRITE_INT_FIELD(this, kParameterSizeOffset,
                  (number_of_parameters << kSystemPointerSizeLog2));
}
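// Example (a sketch, assuming a 64-bit target where kSystemPointerSizeLog2 is
// 3): set_parameter_count(2) stores 2 << 3 == 16 bytes of parameter stack
// space, and parameter_count() further down returns 16 >> 3 == 2 again.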

interpreter::Register BytecodeArray::incoming_new_target_or_generator_register()
    const {
  int register_operand =
      READ_INT_FIELD(this, kIncomingNewTargetOrGeneratorRegisterOffset);
  if (register_operand == 0) {
    return interpreter::Register::invalid_value();
  } else {
    return interpreter::Register::FromOperand(register_operand);
  }
}

void BytecodeArray::set_incoming_new_target_or_generator_register(
    interpreter::Register incoming_new_target_or_generator_register) {
  if (!incoming_new_target_or_generator_register.is_valid()) {
    WRITE_INT_FIELD(this, kIncomingNewTargetOrGeneratorRegisterOffset, 0);
  } else {
    DCHECK(incoming_new_target_or_generator_register.index() <
           register_count());
    DCHECK_NE(0, incoming_new_target_or_generator_register.ToOperand());
    WRITE_INT_FIELD(this, kIncomingNewTargetOrGeneratorRegisterOffset,
                    incoming_new_target_or_generator_register.ToOperand());
  }
}

int BytecodeArray::interrupt_budget() const {
  return READ_INT_FIELD(this, kInterruptBudgetOffset);
}

void BytecodeArray::set_interrupt_budget(int interrupt_budget) {
  DCHECK_GE(interrupt_budget, 0);
  WRITE_INT_FIELD(this, kInterruptBudgetOffset, interrupt_budget);
}

int BytecodeArray::osr_loop_nesting_level() const {
  return READ_INT8_FIELD(this, kOSRNestingLevelOffset);
}

void BytecodeArray::set_osr_loop_nesting_level(int depth) {
  DCHECK(0 <= depth && depth <= AbstractCode::kMaxLoopNestingMarker);
  STATIC_ASSERT(AbstractCode::kMaxLoopNestingMarker < kMaxInt8);
  WRITE_INT8_FIELD(this, kOSRNestingLevelOffset, depth);
}

BytecodeArray::Age BytecodeArray::bytecode_age() const {
  // Bytecode is aged by the concurrent marker.
  return static_cast<Age>(RELAXED_READ_INT8_FIELD(this, kBytecodeAgeOffset));
}

void BytecodeArray::set_bytecode_age(BytecodeArray::Age age) {
  DCHECK_GE(age, kFirstBytecodeAge);
  DCHECK_LE(age, kLastBytecodeAge);
  STATIC_ASSERT(kLastBytecodeAge <= kMaxInt8);
  // Bytecode is aged by the concurrent marker.
  RELAXED_WRITE_INT8_FIELD(this, kBytecodeAgeOffset, static_cast<int8_t>(age));
}

int BytecodeArray::parameter_count() const {
  // Parameter count is stored as the size on stack of the parameters to allow
  // it to be used directly by generated code.
  return READ_INT_FIELD(this, kParameterSizeOffset) >> kSystemPointerSizeLog2;
}

ACCESSORS2(BytecodeArray, constant_pool, FixedArray, kConstantPoolOffset)
ACCESSORS2(BytecodeArray, handler_table, ByteArray, kHandlerTableOffset)
ACCESSORS(BytecodeArray, source_position_table, Object,
          kSourcePositionTableOffset)

void BytecodeArray::clear_padding() {
  int data_size = kHeaderSize + length();
  memset(reinterpret_cast<void*>(address() + data_size), 0,
         SizeFor(length()) - data_size);
}

Address BytecodeArray::GetFirstBytecodeAddress() {
  return ptr() - kHeapObjectTag + kHeaderSize;
}
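// ptr() is the tagged pointer to this BytecodeArray; subtracting kHeapObjectTag
// (the heap object pointer tag) gives the untagged start address, and adding
// kHeaderSize skips the header fields to reach the first bytecode, consistent
// with get()/set() above.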

ByteArray BytecodeArray::SourcePositionTable() {
  Object* maybe_table = source_position_table();
  if (maybe_table->IsByteArray()) return ByteArray::cast(maybe_table);
  DCHECK(maybe_table->IsSourcePositionTableWithFrameCache());
  return SourcePositionTableWithFrameCache::cast(maybe_table)
      ->source_position_table();
}

void BytecodeArray::ClearFrameCacheFromSourcePositionTable() {
  Object* maybe_table = source_position_table();
  if (maybe_table->IsByteArray()) return;
  DCHECK(maybe_table->IsSourcePositionTableWithFrameCache());
  set_source_position_table(SourcePositionTableWithFrameCache::cast(maybe_table)
                                ->source_position_table());
}

int BytecodeArray::BytecodeArraySize() { return SizeFor(this->length()); }

int BytecodeArray::SizeIncludingMetadata() {
  int size = BytecodeArraySize();
  size += constant_pool()->Size();
  size += handler_table()->Size();
  size += SourcePositionTable()->Size();
  return size;
}

BailoutId DeoptimizationData::BytecodeOffset(int i) {
  return BailoutId(BytecodeOffsetRaw(i)->value());
}

void DeoptimizationData::SetBytecodeOffset(int i, BailoutId value) {
  SetBytecodeOffsetRaw(i, Smi::FromInt(value.ToInt()));
}

int DeoptimizationData::DeoptCount() {
  return (length() - kFirstDeoptEntryIndex) / kDeoptEntrySize;
}

}  // namespace internal
}  // namespace v8

#include "src/objects/object-macros-undef.h"

#endif  // V8_OBJECTS_CODE_INL_H_