#ifndef V8_OBJECTS_CODE_INL_H_
#define V8_OBJECTS_CODE_INL_H_

#include "src/objects/code.h"

#include "src/interpreter/bytecode-register.h"
#include "src/isolate.h"
#include "src/objects/dictionary.h"
#include "src/objects/instance-type-inl.h"
#include "src/objects/map-inl.h"
#include "src/objects/maybe-object-inl.h"
#include "src/objects/smi-inl.h"
#include "src/v8memory.h"

// Has to be the last include (doesn't have include guards):
#include "src/objects/object-macros.h"

namespace v8 {
namespace internal {

OBJECT_CONSTRUCTORS_IMPL(DeoptimizationData, FixedArray)
OBJECT_CONSTRUCTORS_IMPL(BytecodeArray, FixedArrayBase)
OBJECT_CONSTRUCTORS_IMPL(AbstractCode, HeapObjectPtr)

NEVER_READ_ONLY_SPACE_IMPL(AbstractCode)

CAST_ACCESSOR2(AbstractCode)
CAST_ACCESSOR2(BytecodeArray)

CAST_ACCESSOR(CodeDataContainer)
CAST_ACCESSOR(DependentCode)
CAST_ACCESSOR2(DeoptimizationData)
CAST_ACCESSOR(SourcePositionTableWithFrameCache)

ACCESSORS2(SourcePositionTableWithFrameCache, source_position_table, ByteArray,
           kSourcePositionTableIndex)
ACCESSORS2(SourcePositionTableWithFrameCache, stack_frame_cache,
           SimpleNumberDictionary, kStackFrameCacheIndex)
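
// AbstractCode is a thin wrapper that is either a Code object or a
// BytecodeArray; the accessors below dispatch to the matching concrete type
// via IsCode().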
int AbstractCode::raw_instruction_size() {
  if (IsCode()) {
    return GetCode()->raw_instruction_size();
  } else {
    return GetBytecodeArray()->length();
  }
}

int AbstractCode::InstructionSize() {
  if (IsCode()) {
    return GetCode()->InstructionSize();
  } else {
    return GetBytecodeArray()->length();
  }
}

ByteArray AbstractCode::source_position_table() {
  if (IsCode()) {
    return GetCode()->SourcePositionTable();
  } else {
    return GetBytecodeArray()->SourcePositionTable();
  }
}

Object* AbstractCode::stack_frame_cache() {
  Object* maybe_table;
  if (IsCode()) {
    maybe_table = GetCode()->source_position_table();
  } else {
    maybe_table = GetBytecodeArray()->source_position_table();
  }
  if (maybe_table->IsSourcePositionTableWithFrameCache()) {
    return SourcePositionTableWithFrameCache::cast(maybe_table)
        ->stack_frame_cache();
  }
  return Smi::kZero;
}

int AbstractCode::SizeIncludingMetadata() {
  if (IsCode()) {
    return GetCode()->SizeIncludingMetadata();
  } else {
    return GetBytecodeArray()->SizeIncludingMetadata();
  }
}

int AbstractCode::ExecutableSize() {
  if (IsCode()) {
    return GetCode()->ExecutableSize();
  } else {
    return GetBytecodeArray()->BytecodeArraySize();
  }
}

Address AbstractCode::raw_instruction_start() {
  if (IsCode()) {
    return GetCode()->raw_instruction_start();
  } else {
    return GetBytecodeArray()->GetFirstBytecodeAddress();
  }
}

Address AbstractCode::InstructionStart() {
  if (IsCode()) {
    return GetCode()->InstructionStart();
  } else {
    return GetBytecodeArray()->GetFirstBytecodeAddress();
  }
}

Address AbstractCode::raw_instruction_end() {
  if (IsCode()) {
    return GetCode()->raw_instruction_end();
  } else {
    return GetBytecodeArray()->GetFirstBytecodeAddress() +
           GetBytecodeArray()->length();
  }
}

Address AbstractCode::InstructionEnd() {
  if (IsCode()) {
    return GetCode()->InstructionEnd();
  } else {
    return GetBytecodeArray()->GetFirstBytecodeAddress() +
           GetBytecodeArray()->length();
  }
}

bool AbstractCode::contains(Address inner_pointer) {
  return (address() <= inner_pointer) && (inner_pointer <= address() + Size());
}

AbstractCode::Kind AbstractCode::kind() {
  if (IsCode()) {
    return static_cast<AbstractCode::Kind>(GetCode()->kind());
  } else {
    return INTERPRETED_FUNCTION;
  }
}

Code AbstractCode::GetCode() { return Code::cast(*this); }

BytecodeArray AbstractCode::GetBytecodeArray() {
  return BytecodeArray::cast(*this);
}
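
// DependentCode is a weak fixed array holding a next-link slot and a flags
// slot, followed by weak references to dependent code objects starting at
// kCodesStartIndex.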
DependentCode* DependentCode::next_link() {
  return DependentCode::cast(Get(kNextLinkIndex)->GetHeapObjectAssumeStrong());
}

void DependentCode::set_next_link(DependentCode* next) {
  Set(kNextLinkIndex, HeapObjectReference::Strong(next));
}

int DependentCode::flags() { return Smi::ToInt(Get(kFlagsIndex)->ToSmi()); }

void DependentCode::set_flags(int flags) {
  Set(kFlagsIndex, MaybeObject::FromObject(Smi::FromInt(flags)));
}

int DependentCode::count() { return CountField::decode(flags()); }

void DependentCode::set_count(int value) {
  set_flags(CountField::update(flags(), value));
}

DependentCode::DependencyGroup DependentCode::group() {
  return static_cast<DependencyGroup>(GroupField::decode(flags()));
}

void DependentCode::set_object_at(int i, MaybeObject object) {
  Set(kCodesStartIndex + i, object);
}

MaybeObject DependentCode::object_at(int i) {
  return Get(kCodesStartIndex + i);
}

void DependentCode::clear_at(int i) {
  Set(kCodesStartIndex + i,
      HeapObjectReference::Strong(GetReadOnlyRoots().undefined_value()));
}

void DependentCode::copy(int from, int to) {
  Set(kCodesStartIndex + to, Get(kCodesStartIndex + from));
}

OBJECT_CONSTRUCTORS_IMPL(Code, HeapObjectPtr)
NEVER_READ_ONLY_SPACE_IMPL(Code)

INT_ACCESSORS(Code, raw_instruction_size, kInstructionSizeOffset)
INT_ACCESSORS(Code, handler_table_offset, kHandlerTableOffsetOffset)
#define CODE_ACCESSORS(name, type, offset) \
  ACCESSORS_CHECKED2(Code, name, type, offset, true, !Heap::InNewSpace(value))
#define CODE_ACCESSORS2(name, type, offset) \
  ACCESSORS_CHECKED3(Code, name, type, offset, true, !Heap::InNewSpace(value))
// Concurrent marker needs to access kind specific flags in code data
// container.
#define SYNCHRONIZED_CODE_ACCESSORS(name, type, offset)         \
  type* Code::name() const {                                    \
    type* value = type::cast(ACQUIRE_READ_FIELD(this, offset)); \
    return value;                                               \
  }                                                             \
  void Code::set_##name(type* value, WriteBarrierMode mode) {   \
    DCHECK(!Heap::InNewSpace(value));                           \
    RELEASE_WRITE_FIELD(this, offset, value);                   \
    CONDITIONAL_WRITE_BARRIER(this, offset, value, mode);       \
  }

CODE_ACCESSORS2(relocation_info, ByteArray, kRelocationInfoOffset)
CODE_ACCESSORS2(deoptimization_data, FixedArray, kDeoptimizationDataOffset)
CODE_ACCESSORS(source_position_table, Object, kSourcePositionTableOffset)

SYNCHRONIZED_CODE_ACCESSORS(code_data_container, CodeDataContainer,
                            kCodeDataContainerOffset)
#undef CODE_ACCESSORS
#undef CODE_ACCESSORS2
#undef SYNCHRONIZED_CODE_ACCESSORS

void Code::WipeOutHeader() {
  WRITE_FIELD(this, kRelocationInfoOffset, Smi::FromInt(0));
  WRITE_FIELD(this, kDeoptimizationDataOffset, Smi::FromInt(0));
  WRITE_FIELD(this, kSourcePositionTableOffset, Smi::FromInt(0));
  WRITE_FIELD(this, kCodeDataContainerOffset, Smi::FromInt(0));
}
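
// Zero out the header padding and everything behind the instruction stream
// (resp. the unwinding info, if present) up to the end of the object, so
// that unused bytes have a deterministic value.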
void Code::clear_padding() {
  memset(reinterpret_cast<void*>(address() + kHeaderPaddingStart), 0,
         kHeaderSize - kHeaderPaddingStart);
  Address data_end =
      has_unwinding_info() ? unwinding_info_end() : raw_instruction_end();
  memset(reinterpret_cast<void*>(data_end), 0,
         CodeSize() - (data_end - address()));
}
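
// The source position table is stored either as a plain ByteArray or, when
// a stack frame cache has been attached, wrapped in a
// SourcePositionTableWithFrameCache.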
ByteArray Code::SourcePositionTable() const {
  Object* maybe_table = source_position_table();
  if (maybe_table->IsByteArray()) return ByteArray::cast(maybe_table);
  DCHECK(maybe_table->IsSourcePositionTableWithFrameCache());
  return SourcePositionTableWithFrameCache::cast(maybe_table)
      ->source_position_table();
}

uint32_t Code::stub_key() const {
  DCHECK(is_stub());
  return READ_UINT32_FIELD(this, kStubKeyOffset);
}

void Code::set_stub_key(uint32_t key) {
  DCHECK(is_stub() || key == 0);  // Allow zero initialization.
  WRITE_UINT32_FIELD(this, kStubKeyOffset, key);
}

Object* Code::next_code_link() const {
  return code_data_container()->next_code_link();
}

void Code::set_next_code_link(Object* value) {
  code_data_container()->set_next_code_link(value);
}
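
// With FLAG_embedded_builtins, builtin code lives off-heap in the embedded
// blob; the on-heap Code object is then only a trampoline, and the non-raw
// accessors below redirect to the off-heap instruction stream.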
int Code::InstructionSize() const {
  if (is_off_heap_trampoline()) {
    DCHECK(FLAG_embedded_builtins);
    return OffHeapInstructionSize();
  }
  return raw_instruction_size();
}

Address Code::raw_instruction_start() const {
  return FIELD_ADDR(this, kHeaderSize);
}

Address Code::InstructionStart() const {
  if (is_off_heap_trampoline()) {
    DCHECK(FLAG_embedded_builtins);
    return OffHeapInstructionStart();
  }
  return raw_instruction_start();
}

Address Code::raw_instruction_end() const {
  return raw_instruction_start() + raw_instruction_size();
}

Address Code::InstructionEnd() const {
  if (is_off_heap_trampoline()) {
    DCHECK(FLAG_embedded_builtins);
    return OffHeapInstructionEnd();
  }
  return raw_instruction_end();
}

int Code::GetUnwindingInfoSizeOffset() const {
  DCHECK(has_unwinding_info());
  return RoundUp(kHeaderSize + raw_instruction_size(), kInt64Size);
}

int Code::unwinding_info_size() const {
  DCHECK(has_unwinding_info());
  return static_cast<int>(
      READ_UINT64_FIELD(this, GetUnwindingInfoSizeOffset()));
}

void Code::set_unwinding_info_size(int value) {
  DCHECK(has_unwinding_info());
  WRITE_UINT64_FIELD(this, GetUnwindingInfoSizeOffset(), value);
}

Address Code::unwinding_info_start() const {
  DCHECK(has_unwinding_info());
  return FIELD_ADDR(this, GetUnwindingInfoSizeOffset()) + kInt64Size;
}

Address Code::unwinding_info_end() const {
  DCHECK(has_unwinding_info());
  return unwinding_info_start() + unwinding_info_size();
}

int Code::body_size() const {
  int unpadded_body_size =
      has_unwinding_info()
          ? static_cast<int>(unwinding_info_end() - raw_instruction_start())
          : raw_instruction_size();
  return RoundUp(unpadded_body_size, kObjectAlignment);
}

int Code::SizeIncludingMetadata() const {
  int size = CodeSize();
  size += relocation_info()->Size();
  size += deoptimization_data()->Size();
  return size;
}

ByteArray Code::unchecked_relocation_info() const {
  return ByteArray::unchecked_cast(READ_FIELD(this, kRelocationInfoOffset));
}

byte* Code::relocation_start() const {
  return unchecked_relocation_info()->GetDataStartAddress();
}

byte* Code::relocation_end() const {
  return unchecked_relocation_info()->GetDataEndAddress();
}

int Code::relocation_size() const {
  return unchecked_relocation_info()->length();
}

Address Code::entry() const { return raw_instruction_start(); }

bool Code::contains(Address inner_pointer) {
  if (is_off_heap_trampoline()) {
    DCHECK(FLAG_embedded_builtins);
    if (OffHeapInstructionStart() <= inner_pointer &&
        inner_pointer < OffHeapInstructionEnd()) {
      return true;
    }
  }
  return (address() <= inner_pointer) && (inner_pointer < address() + Size());
}

int Code::ExecutableSize() const {
  // Check that the assumptions about the layout of the code object holds.
  DCHECK_EQ(static_cast<int>(raw_instruction_start() - address()),
            Code::kHeaderSize);
  return raw_instruction_size() + Code::kHeaderSize;
}

// static
void Code::CopyRelocInfoToByteArray(ByteArray dest, const CodeDesc& desc) {
  DCHECK_EQ(dest->length(), desc.reloc_size);
  CopyBytes(dest->GetDataStartAddress(),
            desc.buffer + desc.buffer_size - desc.reloc_size,
            static_cast<size_t>(desc.reloc_size));
}

int Code::CodeSize() const { return SizeFor(body_size()); }
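
// The flags word packs the code kind together with the unwinding-info,
// turbofanned, stack-slot-count and off-heap-trampoline bits; see
// initialize_flags() below.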
Code::Kind Code::kind() const {
  return KindField::decode(READ_UINT32_FIELD(this, kFlagsOffset));
}

void Code::initialize_flags(Kind kind, bool has_unwinding_info,
                            bool is_turbofanned, int stack_slots,
                            bool is_off_heap_trampoline) {
  CHECK(0 <= stack_slots && stack_slots < StackSlotsField::kMax);
  static_assert(Code::NUMBER_OF_KINDS <= KindField::kMax + 1,
                "field overflow");
  uint32_t flags = HasUnwindingInfoField::encode(has_unwinding_info) |
                   KindField::encode(kind) |
                   IsTurbofannedField::encode(is_turbofanned) |
                   StackSlotsField::encode(stack_slots) |
                   IsOffHeapTrampoline::encode(is_off_heap_trampoline);
  WRITE_UINT32_FIELD(this, kFlagsOffset, flags);
  DCHECK_IMPLIES(stack_slots != 0, has_safepoint_info());
}

inline bool Code::is_interpreter_trampoline_builtin() const {
  bool is_interpreter_trampoline =
      (builtin_index() == Builtins::kInterpreterEntryTrampoline ||
       builtin_index() == Builtins::kInterpreterEnterBytecodeAdvance ||
       builtin_index() == Builtins::kInterpreterEnterBytecodeDispatch);
  return is_interpreter_trampoline;
}

inline bool Code::checks_optimization_marker() const {
  bool checks_marker =
      (builtin_index() == Builtins::kCompileLazy ||
       builtin_index() == Builtins::kInterpreterEntryTrampoline);
  return checks_marker ||
         (kind() == OPTIMIZED_FUNCTION && marked_for_deoptimization());
}

inline bool Code::has_tagged_params() const {
  return kind() != JS_TO_WASM_FUNCTION && kind() != C_WASM_ENTRY &&
         kind() != WASM_FUNCTION;
}

inline bool Code::has_unwinding_info() const {
  return HasUnwindingInfoField::decode(READ_UINT32_FIELD(this, kFlagsOffset));
}

inline bool Code::is_turbofanned() const {
  return IsTurbofannedField::decode(READ_UINT32_FIELD(this, kFlagsOffset));
}

inline bool Code::can_have_weak_objects() const {
  DCHECK(kind() == OPTIMIZED_FUNCTION);
  int32_t flags = code_data_container()->kind_specific_flags();
  return CanHaveWeakObjectsField::decode(flags);
}

inline void Code::set_can_have_weak_objects(bool value) {
  DCHECK(kind() == OPTIMIZED_FUNCTION);
  int32_t previous = code_data_container()->kind_specific_flags();
  int32_t updated = CanHaveWeakObjectsField::update(previous, value);
  code_data_container()->set_kind_specific_flags(updated);
}

inline bool Code::is_construct_stub() const {
  DCHECK(kind() == BUILTIN);
  int32_t flags = code_data_container()->kind_specific_flags();
  return IsConstructStubField::decode(flags);
}

inline void Code::set_is_construct_stub(bool value) {
  DCHECK(kind() == BUILTIN);
  int32_t previous = code_data_container()->kind_specific_flags();
  int32_t updated = IsConstructStubField::update(previous, value);
  code_data_container()->set_kind_specific_flags(updated);
}

inline bool Code::is_promise_rejection() const {
  DCHECK(kind() == BUILTIN);
  int32_t flags = code_data_container()->kind_specific_flags();
  return IsPromiseRejectionField::decode(flags);
}

inline void Code::set_is_promise_rejection(bool value) {
  DCHECK(kind() == BUILTIN);
  int32_t previous = code_data_container()->kind_specific_flags();
  int32_t updated = IsPromiseRejectionField::update(previous, value);
  code_data_container()->set_kind_specific_flags(updated);
}

inline bool Code::is_exception_caught() const {
  DCHECK(kind() == BUILTIN);
  int32_t flags = code_data_container()->kind_specific_flags();
  return IsExceptionCaughtField::decode(flags);
}

inline void Code::set_is_exception_caught(bool value) {
  DCHECK(kind() == BUILTIN);
  int32_t previous = code_data_container()->kind_specific_flags();
  int32_t updated = IsExceptionCaughtField::update(previous, value);
  code_data_container()->set_kind_specific_flags(updated);
}

inline bool Code::is_off_heap_trampoline() const {
  return IsOffHeapTrampoline::decode(READ_UINT32_FIELD(this, kFlagsOffset));
}
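
// Derive the builtin's catch prediction for the handler table from its
// kind-specific flags.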
inline HandlerTable::CatchPrediction Code::GetBuiltinCatchPrediction() {
  if (is_promise_rejection()) return HandlerTable::PROMISE;
  if (is_exception_caught()) return HandlerTable::CAUGHT;
  return HandlerTable::UNCAUGHT;
}

int Code::builtin_index() const {
  int index = READ_INT_FIELD(this, kBuiltinIndexOffset);
  DCHECK(index == -1 || Builtins::IsBuiltinId(index));
  return index;
}

void Code::set_builtin_index(int index) {
  DCHECK(index == -1 || Builtins::IsBuiltinId(index));
  WRITE_INT_FIELD(this, kBuiltinIndexOffset, index);
}

bool Code::is_builtin() const { return builtin_index() != -1; }

bool Code::has_safepoint_info() const {
  return is_turbofanned() || is_wasm_code();
}

int Code::stack_slots() const {
  DCHECK(has_safepoint_info());
  return StackSlotsField::decode(READ_UINT32_FIELD(this, kFlagsOffset));
}

int Code::safepoint_table_offset() const {
  DCHECK(has_safepoint_info());
  return READ_INT32_FIELD(this, kSafepointTableOffsetOffset);
}

void Code::set_safepoint_table_offset(int offset) {
  DCHECK(has_safepoint_info() || offset == 0);  // Allow zero initialization.
  DCHECK(IsAligned(offset, static_cast<unsigned>(kIntSize)));
  WRITE_INT32_FIELD(this, kSafepointTableOffsetOffset, offset);
}

bool Code::marked_for_deoptimization() const {
  DCHECK(kind() == OPTIMIZED_FUNCTION);
  int32_t flags = code_data_container()->kind_specific_flags();
  return MarkedForDeoptimizationField::decode(flags);
}

void Code::set_marked_for_deoptimization(bool flag) {
  DCHECK(kind() == OPTIMIZED_FUNCTION);
  DCHECK_IMPLIES(flag, AllowDeoptimization::IsAllowed(GetIsolate()));
  int32_t previous = code_data_container()->kind_specific_flags();
  int32_t updated = MarkedForDeoptimizationField::update(previous, flag);
  code_data_container()->set_kind_specific_flags(updated);
}

bool Code::embedded_objects_cleared() const {
  DCHECK(kind() == OPTIMIZED_FUNCTION);
  int32_t flags = code_data_container()->kind_specific_flags();
  return EmbeddedObjectsClearedField::decode(flags);
}

void Code::set_embedded_objects_cleared(bool flag) {
  DCHECK(kind() == OPTIMIZED_FUNCTION);
  DCHECK_IMPLIES(flag, marked_for_deoptimization());
  int32_t previous = code_data_container()->kind_specific_flags();
  int32_t updated = EmbeddedObjectsClearedField::update(previous, flag);
  code_data_container()->set_kind_specific_flags(updated);
}

bool Code::deopt_already_counted() const {
  DCHECK(kind() == OPTIMIZED_FUNCTION);
  int32_t flags = code_data_container()->kind_specific_flags();
  return DeoptAlreadyCountedField::decode(flags);
}

void Code::set_deopt_already_counted(bool flag) {
  DCHECK(kind() == OPTIMIZED_FUNCTION);
  DCHECK_IMPLIES(flag, AllowDeoptimization::IsAllowed(GetIsolate()));
  int32_t previous = code_data_container()->kind_specific_flags();
  int32_t updated = DeoptAlreadyCountedField::update(previous, flag);
  code_data_container()->set_kind_specific_flags(updated);
}

bool Code::is_stub() const { return kind() == STUB; }
bool Code::is_optimized_code() const { return kind() == OPTIMIZED_FUNCTION; }
bool Code::is_wasm_code() const { return kind() == WASM_FUNCTION; }

int Code::constant_pool_offset() const {
  if (!FLAG_enable_embedded_constant_pool) return InstructionSize();
  return READ_INT_FIELD(this, kConstantPoolOffset);
}

void Code::set_constant_pool_offset(int value) {
  if (!FLAG_enable_embedded_constant_pool) return;
  WRITE_INT_FIELD(this, kConstantPoolOffset, value);
}

Address Code::constant_pool() const {
  if (FLAG_enable_embedded_constant_pool) {
    int offset = constant_pool_offset();
    if (offset < InstructionSize()) {
      return InstructionStart() + offset;
    }
  }
  return kNullAddress;
}

Code Code::GetCodeFromTargetAddress(Address address) {
  {
    // Off-heap (embedded builtin) addresses are not backed by a Code object
    // on the heap; check that the address is not inside the embedded blob.
    Address start = reinterpret_cast<Address>(Isolate::CurrentEmbeddedBlob());
    Address end = start + Isolate::CurrentEmbeddedBlobSize();
    CHECK(address < start || address >= end);
  }

  HeapObject* code = HeapObject::FromAddress(address - Code::kHeaderSize);
  // GetCodeFromTargetAddress might be called when marking objects and mark
  // bits are still intact.
  return Code::unchecked_cast(code);
}

Object* Code::GetObjectFromCodeEntry(Address code_entry) {
  return HeapObject::FromAddress(code_entry - Code::kHeaderSize);
}

Object* Code::GetObjectFromEntryAddress(Address location_of_address) {
  return GetObjectFromCodeEntry(Memory<Address>(location_of_address));
}

bool Code::CanContainWeakObjects() {
  return is_optimized_code() && can_have_weak_objects();
}

bool Code::IsWeakObject(HeapObject* object) {
  return (CanContainWeakObjects() && IsWeakObjectInOptimizedCode(object));
}

bool Code::IsWeakObjectInOptimizedCode(HeapObject* object) {
  Map map = object->synchronized_map();
  InstanceType instance_type = map->instance_type();
  if (InstanceTypeChecker::IsMap(instance_type)) {
    return Map::cast(object)->CanTransition();
  }
  return InstanceTypeChecker::IsPropertyCell(instance_type) ||
         InstanceTypeChecker::IsJSReceiver(instance_type) ||
         InstanceTypeChecker::IsContext(instance_type);
}

RELAXED_INT32_ACCESSORS(CodeDataContainer, kind_specific_flags,
                        kKindSpecificFlagsOffset)
ACCESSORS(CodeDataContainer, next_code_link, Object, kNextCodeLinkOffset)

void CodeDataContainer::clear_padding() {
  memset(reinterpret_cast<void*>(address() + kUnalignedSize), 0,
         kSize - kUnalignedSize);
}
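
// Bytecodes are stored as raw bytes directly after the BytecodeArray header.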
byte BytecodeArray::get(int index) {
  DCHECK(index >= 0 && index < this->length());
  return READ_BYTE_FIELD(this, kHeaderSize + index * kCharSize);
}

void BytecodeArray::set(int index, byte value) {
  DCHECK(index >= 0 && index < this->length());
  WRITE_BYTE_FIELD(this, kHeaderSize + index * kCharSize, value);
}

void BytecodeArray::set_frame_size(int frame_size) {
  DCHECK_GE(frame_size, 0);
  DCHECK(IsAligned(frame_size, kSystemPointerSize));
  WRITE_INT_FIELD(this, kFrameSizeOffset, frame_size);
}

int BytecodeArray::frame_size() const {
  return READ_INT_FIELD(this, kFrameSizeOffset);
}

int BytecodeArray::register_count() const {
  return frame_size() / kSystemPointerSize;
}

void BytecodeArray::set_parameter_count(int number_of_parameters) {
  DCHECK_GE(number_of_parameters, 0);
  // Parameter count is stored as the size on stack of the parameters to
  // allow it to be used directly by generated code.
  WRITE_INT_FIELD(this, kParameterSizeOffset,
                  (number_of_parameters << kSystemPointerSizeLog2));
}

interpreter::Register BytecodeArray::incoming_new_target_or_generator_register()
    const {
  int register_operand =
      READ_INT_FIELD(this, kIncomingNewTargetOrGeneratorRegisterOffset);
  if (register_operand == 0) {
    return interpreter::Register::invalid_value();
  } else {
    return interpreter::Register::FromOperand(register_operand);
  }
}

void BytecodeArray::set_incoming_new_target_or_generator_register(
    interpreter::Register incoming_new_target_or_generator_register) {
  if (!incoming_new_target_or_generator_register.is_valid()) {
    WRITE_INT_FIELD(this, kIncomingNewTargetOrGeneratorRegisterOffset, 0);
  } else {
    DCHECK(incoming_new_target_or_generator_register.index() <
           register_count());
    DCHECK_NE(0, incoming_new_target_or_generator_register.ToOperand());
    WRITE_INT_FIELD(this, kIncomingNewTargetOrGeneratorRegisterOffset,
                    incoming_new_target_or_generator_register.ToOperand());
  }
}

int BytecodeArray::interrupt_budget() const {
  return READ_INT_FIELD(this, kInterruptBudgetOffset);
}

void BytecodeArray::set_interrupt_budget(int interrupt_budget) {
  DCHECK_GE(interrupt_budget, 0);
  WRITE_INT_FIELD(this, kInterruptBudgetOffset, interrupt_budget);
}
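
// The OSR loop nesting level is consulted by the interpreter when deciding
// whether to attempt on-stack replacement of a running loop.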
int BytecodeArray::osr_loop_nesting_level() const {
  return READ_INT8_FIELD(this, kOSRNestingLevelOffset);
}

void BytecodeArray::set_osr_loop_nesting_level(int depth) {
  DCHECK(0 <= depth && depth <= AbstractCode::kMaxLoopNestingMarker);
  STATIC_ASSERT(AbstractCode::kMaxLoopNestingMarker < kMaxInt8);
  WRITE_INT8_FIELD(this, kOSRNestingLevelOffset, depth);
}

BytecodeArray::Age BytecodeArray::bytecode_age() const {
  // Bytecode is aged by the concurrent marker.
  return static_cast<Age>(RELAXED_READ_INT8_FIELD(this, kBytecodeAgeOffset));
}

void BytecodeArray::set_bytecode_age(BytecodeArray::Age age) {
  DCHECK_GE(age, kFirstBytecodeAge);
  DCHECK_LE(age, kLastBytecodeAge);
  STATIC_ASSERT(kLastBytecodeAge <= kMaxInt8);
  // Bytecode is aged by the concurrent marker.
  RELAXED_WRITE_INT8_FIELD(this, kBytecodeAgeOffset, static_cast<int8_t>(age));
}

int BytecodeArray::parameter_count() const {
  // Parameter count is stored as the size on stack of the parameters to
  // allow it to be used directly by generated code.
  return READ_INT_FIELD(this, kParameterSizeOffset) >> kSystemPointerSizeLog2;
}

ACCESSORS2(BytecodeArray, constant_pool, FixedArray, kConstantPoolOffset)
ACCESSORS2(BytecodeArray, handler_table, ByteArray, kHandlerTableOffset)
ACCESSORS(BytecodeArray, source_position_table, Object,
          kSourcePositionTableOffset)

void BytecodeArray::clear_padding() {
  int data_size = kHeaderSize + length();
  memset(reinterpret_cast<void*>(address() + data_size), 0,
         SizeFor(length()) - data_size);
}

Address BytecodeArray::GetFirstBytecodeAddress() {
  return ptr() - kHeapObjectTag + kHeaderSize;
}

ByteArray BytecodeArray::SourcePositionTable() {
  Object* maybe_table = source_position_table();
  if (maybe_table->IsByteArray()) return ByteArray::cast(maybe_table);
  DCHECK(maybe_table->IsSourcePositionTableWithFrameCache());
  return SourcePositionTableWithFrameCache::cast(maybe_table)
      ->source_position_table();
}

void BytecodeArray::ClearFrameCacheFromSourcePositionTable() {
  Object* maybe_table = source_position_table();
  if (maybe_table->IsByteArray()) return;
  DCHECK(maybe_table->IsSourcePositionTableWithFrameCache());
  set_source_position_table(SourcePositionTableWithFrameCache::cast(maybe_table)
                                ->source_position_table());
}

int BytecodeArray::BytecodeArraySize() { return SizeFor(this->length()); }

int BytecodeArray::SizeIncludingMetadata() {
  int size = BytecodeArraySize();
  size += constant_pool()->Size();
  size += handler_table()->Size();
  size += SourcePositionTable()->Size();
  return size;
}
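
// DeoptimizationData is a FixedArray with a fixed-size header followed by
// one entry of kDeoptEntrySize slots per deoptimization point.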
BailoutId DeoptimizationData::BytecodeOffset(int i) {
  return BailoutId(BytecodeOffsetRaw(i)->value());
}

void DeoptimizationData::SetBytecodeOffset(int i, BailoutId value) {
  SetBytecodeOffsetRaw(i, Smi::FromInt(value.ToInt()));
}

int DeoptimizationData::DeoptCount() {
  return (length() - kFirstDeoptEntryIndex) / kDeoptEntrySize;
}

}  // namespace internal
}  // namespace v8

#include "src/objects/object-macros-undef.h"

#endif  // V8_OBJECTS_CODE_INL_H_