5 #if V8_TARGET_ARCH_IA32 7 #include "src/base/bits.h" 8 #include "src/base/division-by-constant.h" 9 #include "src/base/utils/random-number-generator.h" 10 #include "src/bootstrapper.h" 11 #include "src/callable.h" 12 #include "src/code-factory.h" 13 #include "src/code-stubs.h" 14 #include "src/counters.h" 15 #include "src/debug/debug.h" 16 #include "src/external-reference-table.h" 17 #include "src/frame-constants.h" 18 #include "src/frames-inl.h" 19 #include "src/ia32/assembler-ia32-inl.h" 20 #include "src/macro-assembler.h" 21 #include "src/runtime/runtime.h" 22 #include "src/snapshot/embedded-data.h" 23 #include "src/snapshot/snapshot.h" 28 #include "src/ia32/macro-assembler-ia32.h" 37 MacroAssembler::MacroAssembler(Isolate* isolate,
38 const AssemblerOptions& options,
void* buffer,
39 int size, CodeObjectRequired create_code_object)
40 : TurboAssembler(isolate, options, buffer, size, create_code_object) {
41 if (create_code_object == CodeObjectRequired::kYes) {
47 code_object_ = Handle<HeapObject>::New(
48 *isolate->factory()->NewSelfReferenceMarker(), isolate);
52 void TurboAssembler::InitializeRootRegister() {
53 ExternalReference isolate_root = ExternalReference::isolate_root(isolate());
54 Move(kRootRegister, Immediate(isolate_root));
57 void TurboAssembler::LoadRoot(Register destination, RootIndex index) {
58 if (root_array_available()) {
60 Operand(kRootRegister, RootRegisterOffsetForRootIndex(index)));
64 if (RootsTable::IsImmortalImmovable(index)) {
65 Handle<Object>
object = isolate()->root_handle(index);
66 if (object->IsSmi()) {
67 mov(destination, Immediate(Smi::cast(*
object)));
70 DCHECK(object->IsHeapObject());
71 mov(destination, Handle<HeapObject>::cast(
object));
76 ExternalReference isolate_root = ExternalReference::isolate_root(isolate());
78 Operand(isolate_root.address(), RelocInfo::EXTERNAL_REFERENCE));
79 mov(destination, Operand(destination, RootRegisterOffsetForRootIndex(index)));
82 void TurboAssembler::CompareRoot(Register with, Register scratch,
84 if (root_array_available()) {
85 CompareRoot(with, index);
87 ExternalReference isolate_root = ExternalReference::isolate_root(isolate());
89 Operand(isolate_root.address(), RelocInfo::EXTERNAL_REFERENCE));
90 cmp(with, Operand(scratch, RootRegisterOffsetForRootIndex(index)));
94 void TurboAssembler::CompareRoot(Register with, RootIndex index) {
95 if (root_array_available()) {
96 cmp(with, Operand(kRootRegister, RootRegisterOffsetForRootIndex(index)));
100 DCHECK(RootsTable::IsImmortalImmovable(index));
101 Handle<Object>
object = isolate()->root_handle(index);
102 if (object->IsHeapObject()) {
103 cmp(with, Handle<HeapObject>::cast(
object));
105 cmp(with, Immediate(Smi::cast(*
object)));
109 void TurboAssembler::CompareStackLimit(Register with) {
110 if (root_array_available()) {
111 CompareRoot(with, RootIndex::kStackLimit);
113 DCHECK(!options().isolate_independent_code);
114 ExternalReference ref =
115 ExternalReference::address_of_stack_limit(isolate());
116 cmp(with, Operand(ref.address(), RelocInfo::EXTERNAL_REFERENCE));
120 void TurboAssembler::CompareRealStackLimit(Register with) {
121 if (root_array_available()) {
122 CompareRoot(with, RootIndex::kRealStackLimit);
124 DCHECK(!options().isolate_independent_code);
125 ExternalReference ref =
126 ExternalReference::address_of_real_stack_limit(isolate());
127 cmp(with, Operand(ref.address(), RelocInfo::EXTERNAL_REFERENCE));
131 void MacroAssembler::PushRoot(RootIndex index) {
132 if (root_array_available()) {
133 DCHECK(RootsTable::IsImmortalImmovable(index));
134 push(Operand(kRootRegister, RootRegisterOffsetForRootIndex(index)));
139 DCHECK(RootsTable::IsImmortalImmovable(index));
140 Handle<Object>
object = isolate()->root_handle(index);
141 if (object->IsHeapObject()) {
142 Push(Handle<HeapObject>::cast(
object));
144 Push(Smi::cast(*
object));
148 Operand TurboAssembler::ExternalReferenceAsOperand(ExternalReference reference,
151 if (root_array_available() && options().isolate_independent_code) {
152 if (IsAddressableThroughRootRegister(isolate(), reference)) {
156 RootRegisterOffsetForExternalReference(isolate(), reference);
157 return Operand(kRootRegister, offset);
160 mov(scratch, Operand(kRootRegister,
161 RootRegisterOffsetForExternalReferenceTableEntry(
162 isolate(), reference)));
163 return Operand(scratch, 0);
166 Move(scratch, Immediate(reference));
167 return Operand(scratch, 0);
172 Operand TurboAssembler::ExternalReferenceAddressAsOperand(
173 ExternalReference reference) {
174 DCHECK(FLAG_embedded_builtins);
175 DCHECK(root_array_available());
176 DCHECK(options().isolate_independent_code);
179 RootRegisterOffsetForExternalReferenceTableEntry(isolate(), reference));
184 Operand TurboAssembler::HeapObjectAsOperand(Handle<HeapObject>
object) {
185 DCHECK(FLAG_embedded_builtins);
186 DCHECK(root_array_available());
189 RootIndex root_index;
190 if (isolate()->roots_table().IsRootHandle(
object, &root_index)) {
191 return Operand(kRootRegister, RootRegisterOffsetForRootIndex(root_index));
192 }
else if (isolate()->builtins()->IsBuiltinHandle(
object, &builtin_index)) {
193 return Operand(kRootRegister,
194 RootRegisterOffsetForBuiltinIndex(builtin_index));
195 }
else if (
object.is_identical_to(code_object_) &&
196 Builtins::IsBuiltinId(maybe_builtin_index_)) {
197 return Operand(kRootRegister,
198 RootRegisterOffsetForBuiltinIndex(maybe_builtin_index_));
206 void TurboAssembler::LoadFromConstantsTable(Register destination,
207 int constant_index) {
208 DCHECK(RootsTable::IsImmortalImmovable(RootIndex::kBuiltinsConstantsTable));
209 LoadRoot(destination, RootIndex::kBuiltinsConstantsTable);
211 FieldOperand(destination,
212 FixedArray::kHeaderSize + constant_index * kPointerSize));
215 void TurboAssembler::LoadRootRegisterOffset(Register destination,
217 DCHECK(is_int32(offset));
218 DCHECK(root_array_available());
220 mov(destination, kRootRegister);
222 lea(destination, Operand(kRootRegister, static_cast<int32_t>(offset)));
226 void TurboAssembler::LoadRootRelative(Register destination, int32_t offset) {
227 DCHECK(root_array_available());
228 mov(destination, Operand(kRootRegister, offset));
231 void TurboAssembler::LoadAddress(Register destination,
232 ExternalReference source) {
234 if (root_array_available() && options().isolate_independent_code) {
235 IndirectLoadExternalReference(destination, source);
238 mov(destination, Immediate(source));
241 static constexpr Register saved_regs[] = {eax, ecx, edx};
243 static constexpr
int kNumberOfSavedRegs =
sizeof(saved_regs) /
sizeof(Register);
245 int TurboAssembler::RequiredStackSizeForCallerSaved(SaveFPRegsMode fp_mode,
248 Register exclusion3)
const {
250 for (
int i = 0;
i < kNumberOfSavedRegs;
i++) {
251 Register reg = saved_regs[
i];
252 if (reg != exclusion1 && reg != exclusion2 && reg != exclusion3) {
253 bytes += kPointerSize;
257 if (fp_mode == kSaveFPRegs) {
259 bytes += kDoubleSize * (XMMRegister::kNumRegisters - 1);
265 int TurboAssembler::PushCallerSaved(SaveFPRegsMode fp_mode, Register exclusion1,
266 Register exclusion2, Register exclusion3) {
271 for (
int i = 0;
i < kNumberOfSavedRegs;
i++) {
272 Register reg = saved_regs[
i];
273 if (reg != exclusion1 && reg != exclusion2 && reg != exclusion3) {
275 bytes += kPointerSize;
279 if (fp_mode == kSaveFPRegs) {
281 int delta = kDoubleSize * (XMMRegister::kNumRegisters - 1);
282 sub(esp, Immediate(delta));
283 for (
int i = XMMRegister::kNumRegisters - 1;
i > 0;
i--) {
284 XMMRegister reg = XMMRegister::from_code(
i);
285 movsd(Operand(esp, (
i - 1) * kDoubleSize), reg);
293 int TurboAssembler::PopCallerSaved(SaveFPRegsMode fp_mode, Register exclusion1,
294 Register exclusion2, Register exclusion3) {
296 if (fp_mode == kSaveFPRegs) {
298 int delta = kDoubleSize * (XMMRegister::kNumRegisters - 1);
299 for (
int i = XMMRegister::kNumRegisters - 1;
i > 0;
i--) {
300 XMMRegister reg = XMMRegister::from_code(
i);
301 movsd(reg, Operand(esp, (
i - 1) * kDoubleSize));
303 add(esp, Immediate(delta));
307 for (
int i = kNumberOfSavedRegs - 1;
i >= 0;
i--) {
308 Register reg = saved_regs[
i];
309 if (reg != exclusion1 && reg != exclusion2 && reg != exclusion3) {
311 bytes += kPointerSize;
318 void MacroAssembler::DoubleToI(Register result_reg, XMMRegister input_reg,
319 XMMRegister scratch, Label* lost_precision,
320 Label* is_nan, Label::Distance dst) {
321 DCHECK(input_reg != scratch);
322 cvttsd2si(result_reg, Operand(input_reg));
323 Cvtsi2sd(scratch, Operand(result_reg));
324 ucomisd(scratch, input_reg);
325 j(not_equal, lost_precision, dst);
326 j(parity_even, is_nan, dst);
329 void MacroAssembler::RecordWriteField(Register
object,
int offset,
330 Register value, Register dst,
331 SaveFPRegsMode save_fp,
332 RememberedSetAction remembered_set_action,
333 SmiCheck smi_check) {
339 if (smi_check == INLINE_SMI_CHECK) {
340 JumpIfSmi(value, &done);
345 DCHECK(IsAligned(offset, kPointerSize));
347 lea(dst, FieldOperand(
object, offset));
348 if (emit_debug_code()) {
350 test_b(dst, Immediate(kPointerSize - 1));
351 j(zero, &ok, Label::kNear);
356 RecordWrite(
object, dst, value, save_fp, remembered_set_action,
363 if (emit_debug_code()) {
364 mov(value, Immediate(bit_cast<int32_t>(kZapValue)));
365 mov(dst, Immediate(bit_cast<int32_t>(kZapValue)));
369 void TurboAssembler::SaveRegisters(RegList registers) {
370 DCHECK_GT(NumRegs(registers), 0);
371 for (
int i = 0;
i < Register::kNumRegisters; ++
i) {
372 if ((registers >>
i) & 1u) {
373 push(Register::from_code(
i));
378 void TurboAssembler::RestoreRegisters(RegList registers) {
379 DCHECK_GT(NumRegs(registers), 0);
380 for (
int i = Register::kNumRegisters - 1;
i >= 0; --
i) {
381 if ((registers >>
i) & 1u) {
382 pop(Register::from_code(
i));
387 void TurboAssembler::CallRecordWriteStub(
388 Register
object, Register address,
389 RememberedSetAction remembered_set_action, SaveFPRegsMode fp_mode) {
391 object, address, remembered_set_action, fp_mode,
392 isolate()->builtins()->builtin_handle(Builtins::kRecordWrite),
396 void TurboAssembler::CallRecordWriteStub(
397 Register
object, Register address,
398 RememberedSetAction remembered_set_action, SaveFPRegsMode fp_mode,
399 Address wasm_target) {
400 CallRecordWriteStub(
object, address, remembered_set_action, fp_mode,
401 Handle<Code>::null(), wasm_target);
404 void TurboAssembler::CallRecordWriteStub(
405 Register
object, Register address,
406 RememberedSetAction remembered_set_action, SaveFPRegsMode fp_mode,
407 Handle<Code> code_target, Address wasm_target) {
408 DCHECK_NE(code_target.is_null(), wasm_target == kNullAddress);
414 RecordWriteDescriptor descriptor;
415 RegList registers = descriptor.allocatable_registers();
417 SaveRegisters(registers);
419 Register object_parameter(
420 descriptor.GetRegisterParameter(RecordWriteDescriptor::kObject));
421 Register slot_parameter(
422 descriptor.GetRegisterParameter(RecordWriteDescriptor::kSlot));
423 Register remembered_set_parameter(
424 descriptor.GetRegisterParameter(RecordWriteDescriptor::kRememberedSet));
425 Register fp_mode_parameter(
426 descriptor.GetRegisterParameter(RecordWriteDescriptor::kFPMode));
432 pop(object_parameter);
434 Move(remembered_set_parameter, Smi::FromEnum(remembered_set_action));
435 Move(fp_mode_parameter, Smi::FromEnum(fp_mode));
436 if (code_target.is_null()) {
438 wasm_call(wasm_target, RelocInfo::WASM_STUB_CALL);
440 Call(code_target, RelocInfo::CODE_TARGET);
443 RestoreRegisters(registers);
446 void MacroAssembler::RecordWrite(Register
object, Register address,
447 Register value, SaveFPRegsMode fp_mode,
448 RememberedSetAction remembered_set_action,
449 SmiCheck smi_check) {
450 DCHECK(
object != value);
451 DCHECK(
object != address);
452 DCHECK(value != address);
453 AssertNotSmi(
object);
455 if (remembered_set_action == OMIT_REMEMBERED_SET &&
456 !FLAG_incremental_marking) {
460 if (emit_debug_code()) {
462 cmp(value, Operand(address, 0));
463 j(equal, &ok, Label::kNear);
472 if (smi_check == INLINE_SMI_CHECK) {
474 JumpIfSmi(value, &done, Label::kNear);
479 MemoryChunk::kPointersToHereAreInterestingMask, zero, &done,
481 CheckPageFlag(
object,
483 MemoryChunk::kPointersFromHereAreInterestingMask,
488 CallRecordWriteStub(
object, address, remembered_set_action, fp_mode);
493 isolate()->counters()->write_barriers_static()->Increment();
494 IncrementCounter(isolate()->counters()->write_barriers_dynamic(), 1, value);
498 if (emit_debug_code()) {
499 mov(address, Immediate(bit_cast<int32_t>(kZapValue)));
500 mov(value, Immediate(bit_cast<int32_t>(kZapValue)));
504 void MacroAssembler::MaybeDropFrames() {
507 ExternalReference restart_fp =
508 ExternalReference::debug_restart_fp_address(isolate());
509 mov(eax, ExternalReferenceAsOperand(restart_fp, eax));
511 j(zero, &dont_drop, Label::kNear);
513 Jump(BUILTIN_CODE(isolate(), FrameDropperTrampoline), RelocInfo::CODE_TARGET);
517 void TurboAssembler::Cvtsi2ss(XMMRegister dst, Operand src) {
522 void TurboAssembler::Cvtsi2sd(XMMRegister dst, Operand src) {
527 void TurboAssembler::Cvtui2ss(XMMRegister dst, Operand src, Register tmp) {
529 Register src_reg = src.is_reg_only() ? src.reg() : tmp;
530 if (src_reg == tmp) mov(tmp, src);
531 cvtsi2ss(dst, src_reg);
532 test(src_reg, src_reg);
533 j(positive, &done, Label::kNear);
536 if (src_reg != tmp) mov(tmp, src_reg);
540 j(not_carry, &msb_not_set, Label::kNear);
541 or_(tmp, Immediate(1));
548 void TurboAssembler::Cvttss2ui(Register dst, Operand src, XMMRegister tmp) {
553 Move(tmp, static_cast<float>(INT32_MIN));
556 or_(dst, Immediate(0x80000000));
560 void TurboAssembler::Cvtui2sd(XMMRegister dst, Operand src, Register scratch) {
562 cmp(src, Immediate(0));
563 ExternalReference uint32_bias = ExternalReference::address_of_uint32_bias();
565 j(not_sign, &done, Label::kNear);
566 addsd(dst, ExternalReferenceAsOperand(uint32_bias, scratch));
570 void TurboAssembler::Cvttsd2ui(Register dst, Operand src, XMMRegister tmp) {
571 Move(tmp, -2147483648.0);
574 add(dst, Immediate(0x80000000));
577 void TurboAssembler::ShlPair(Register high, Register low, uint8_t shift) {
580 shl(high, shift - 32);
583 shld(high, low, shift);
588 void TurboAssembler::ShlPair_cl(Register high, Register low) {
592 test(ecx, Immediate(0x20));
593 j(equal, &done, Label::kNear);
599 void TurboAssembler::ShrPair(Register high, Register low, uint8_t shift) {
602 shr(low, shift - 32);
605 shrd(high, low, shift);
610 void TurboAssembler::ShrPair_cl(Register high, Register low) {
614 test(ecx, Immediate(0x20));
615 j(equal, &done, Label::kNear);
621 void TurboAssembler::SarPair(Register high, Register low, uint8_t shift) {
624 sar(low, shift - 32);
627 shrd(high, low, shift);
632 void TurboAssembler::SarPair_cl(Register high, Register low) {
636 test(ecx, Immediate(0x20));
637 j(equal, &done, Label::kNear);
643 void MacroAssembler::CmpObjectType(Register heap_object,
646 mov(map, FieldOperand(heap_object, HeapObject::kMapOffset));
647 CmpInstanceType(map, type);
651 void MacroAssembler::CmpInstanceType(Register map, InstanceType type) {
652 cmpw(FieldOperand(map, Map::kInstanceTypeOffset), Immediate(type));
655 void MacroAssembler::AssertSmi(Register
object) {
656 if (emit_debug_code()) {
657 test(
object, Immediate(kSmiTagMask));
658 Check(equal, AbortReason::kOperandIsNotASmi);
662 void MacroAssembler::AssertConstructor(Register
object) {
663 if (emit_debug_code()) {
664 test(
object, Immediate(kSmiTagMask));
665 Check(not_equal, AbortReason::kOperandIsASmiAndNotAConstructor);
667 mov(
object, FieldOperand(
object, HeapObject::kMapOffset));
668 test_b(FieldOperand(
object, Map::kBitFieldOffset),
669 Immediate(Map::IsConstructorBit::kMask));
671 Check(not_zero, AbortReason::kOperandIsNotAConstructor);
675 void MacroAssembler::AssertFunction(Register
object) {
676 if (emit_debug_code()) {
677 test(
object, Immediate(kSmiTagMask));
678 Check(not_equal, AbortReason::kOperandIsASmiAndNotAFunction);
680 CmpObjectType(
object, JS_FUNCTION_TYPE,
object);
682 Check(equal, AbortReason::kOperandIsNotAFunction);
687 void MacroAssembler::AssertBoundFunction(Register
object) {
688 if (emit_debug_code()) {
689 test(
object, Immediate(kSmiTagMask));
690 Check(not_equal, AbortReason::kOperandIsASmiAndNotABoundFunction);
692 CmpObjectType(
object, JS_BOUND_FUNCTION_TYPE,
object);
694 Check(equal, AbortReason::kOperandIsNotABoundFunction);
698 void MacroAssembler::AssertGeneratorObject(Register
object) {
699 if (!emit_debug_code())
return;
701 test(
object, Immediate(kSmiTagMask));
702 Check(not_equal, AbortReason::kOperandIsASmiAndNotAGeneratorObject);
706 Register map = object;
709 mov(map, FieldOperand(
object, HeapObject::kMapOffset));
713 CmpInstanceType(map, JS_GENERATOR_OBJECT_TYPE);
714 j(equal, &do_check, Label::kNear);
717 CmpInstanceType(map, JS_ASYNC_FUNCTION_OBJECT_TYPE);
718 j(equal, &do_check, Label::kNear);
721 CmpInstanceType(map, JS_ASYNC_GENERATOR_OBJECT_TYPE);
727 Check(equal, AbortReason::kOperandIsNotAGeneratorObject);
730 void MacroAssembler::AssertUndefinedOrAllocationSite(Register
object,
732 if (emit_debug_code()) {
734 AssertNotSmi(
object);
735 CompareRoot(
object, scratch, RootIndex::kUndefinedValue);
736 j(equal, &done_checking);
737 LoadRoot(scratch, RootIndex::kAllocationSiteWithWeakNextMap);
738 cmp(FieldOperand(
object, 0), scratch);
739 Assert(equal, AbortReason::kExpectedUndefinedOrCell);
740 bind(&done_checking);
745 void MacroAssembler::AssertNotSmi(Register
object) {
746 if (emit_debug_code()) {
747 test(
object, Immediate(kSmiTagMask));
748 Check(not_equal, AbortReason::kOperandIsASmi);
752 void TurboAssembler::StubPrologue(StackFrame::Type type) {
755 push(Immediate(StackFrame::TypeToMarker(type)));
758 void TurboAssembler::Prologue() {
765 void TurboAssembler::EnterFrame(StackFrame::Type type) {
768 push(Immediate(StackFrame::TypeToMarker(type)));
771 void TurboAssembler::LeaveFrame(StackFrame::Type type) {
772 if (emit_debug_code()) {
773 cmp(Operand(ebp, CommonFrameConstants::kContextOrFrameTypeOffset),
774 Immediate(StackFrame::TypeToMarker(type)));
775 Check(equal, AbortReason::kStackFrameTypesMustMatch);
781 void TurboAssembler::AllocateStackFrame(Register bytes_scratch) {
786 constexpr
int kPageSize = 4 * 1024;
788 Label touch_next_page;
790 bind(&touch_next_page);
791 sub(esp, Immediate(kPageSize));
793 mov(Operand(esp, 0), Immediate(0));
794 sub(bytes_scratch, Immediate(kPageSize));
797 cmp(bytes_scratch, kPageSize);
798 j(greater, &touch_next_page);
800 sub(esp, bytes_scratch);
804 void MacroAssembler::EnterExitFramePrologue(StackFrame::Type frame_type,
806 DCHECK(frame_type == StackFrame::EXIT ||
807 frame_type == StackFrame::BUILTIN_EXIT);
810 DCHECK_EQ(+2 * kPointerSize, ExitFrameConstants::kCallerSPDisplacement);
811 DCHECK_EQ(+1 * kPointerSize, ExitFrameConstants::kCallerPCOffset);
812 DCHECK_EQ(0 * kPointerSize, ExitFrameConstants::kCallerFPOffset);
817 push(Immediate(StackFrame::TypeToMarker(frame_type)));
818 DCHECK_EQ(-2 * kPointerSize, ExitFrameConstants::kSPOffset);
820 DCHECK_EQ(-3 * kPointerSize, ExitFrameConstants::kCodeOffset);
821 Move(scratch, CodeObject());
824 STATIC_ASSERT(edx == kRuntimeCallFunctionRegister);
825 STATIC_ASSERT(esi == kContextRegister);
828 ExternalReference c_entry_fp_address =
829 ExternalReference::Create(IsolateAddressId::kCEntryFPAddress, isolate());
830 ExternalReference context_address =
831 ExternalReference::Create(IsolateAddressId::kContextAddress, isolate());
832 ExternalReference c_function_address =
833 ExternalReference::Create(IsolateAddressId::kCFunctionAddress, isolate());
835 DCHECK(!AreAliased(scratch, ebp, esi, edx));
836 mov(ExternalReferenceAsOperand(c_entry_fp_address, scratch), ebp);
837 mov(ExternalReferenceAsOperand(context_address, scratch), esi);
838 mov(ExternalReferenceAsOperand(c_function_address, scratch), edx);
842 void MacroAssembler::EnterExitFrameEpilogue(
int argc,
bool save_doubles) {
845 int space = XMMRegister::kNumRegisters * kDoubleSize + argc * kPointerSize;
846 sub(esp, Immediate(space));
847 const int offset = -ExitFrameConstants::kFixedFrameSizeFromFp;
848 for (
int i = 0;
i < XMMRegister::kNumRegisters;
i++) {
849 XMMRegister reg = XMMRegister::from_code(
i);
850 movsd(Operand(ebp, offset - ((
i + 1) * kDoubleSize)), reg);
853 sub(esp, Immediate(argc * kPointerSize));
857 const int kFrameAlignment = base::OS::ActivationFrameAlignment();
858 if (kFrameAlignment > 0) {
859 DCHECK(base::bits::IsPowerOfTwo(kFrameAlignment));
860 and_(esp, -kFrameAlignment);
864 mov(Operand(ebp, ExitFrameConstants::kSPOffset), esp);
867 void MacroAssembler::EnterExitFrame(
int argc,
bool save_doubles,
868 StackFrame::Type frame_type) {
869 EnterExitFramePrologue(frame_type, edi);
872 int offset = StandardFrameConstants::kCallerSPOffset - kPointerSize;
874 lea(esi, Operand(ebp, eax, times_4, offset));
877 EnterExitFrameEpilogue(argc, save_doubles);
880 void MacroAssembler::EnterApiExitFrame(
int argc, Register scratch) {
881 EnterExitFramePrologue(StackFrame::EXIT, scratch);
882 EnterExitFrameEpilogue(argc,
false);
886 void MacroAssembler::LeaveExitFrame(
bool save_doubles,
bool pop_arguments) {
889 const int offset = -ExitFrameConstants::kFixedFrameSizeFromFp;
890 for (
int i = 0;
i < XMMRegister::kNumRegisters;
i++) {
891 XMMRegister reg = XMMRegister::from_code(
i);
892 movsd(reg, Operand(ebp, offset - ((
i + 1) * kDoubleSize)));
898 mov(ecx, Operand(ebp, 1 * kPointerSize));
899 mov(ebp, Operand(ebp, 0 * kPointerSize));
902 lea(esp, Operand(esi, 1 * kPointerSize));
911 LeaveExitFrameEpilogue();
914 void MacroAssembler::LeaveExitFrameEpilogue() {
916 ExternalReference c_entry_fp_address =
917 ExternalReference::Create(IsolateAddressId::kCEntryFPAddress, isolate());
918 mov(ExternalReferenceAsOperand(c_entry_fp_address, esi), Immediate(0));
921 ExternalReference context_address =
922 ExternalReference::Create(IsolateAddressId::kContextAddress, isolate());
923 mov(esi, ExternalReferenceAsOperand(context_address, esi));
926 mov(ExternalReferenceAsOperand(context_address, eax),
927 Immediate(Context::kInvalidContext));
932 void MacroAssembler::LeaveApiExitFrame() {
936 LeaveExitFrameEpilogue();
939 void MacroAssembler::PushStackHandler(Register scratch) {
941 STATIC_ASSERT(StackHandlerConstants::kSize == 2 * kPointerSize);
942 STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0);
947 ExternalReference handler_address =
948 ExternalReference::Create(IsolateAddressId::kHandlerAddress, isolate());
949 push(ExternalReferenceAsOperand(handler_address, scratch));
952 mov(ExternalReferenceAsOperand(handler_address, scratch), esp);
955 void MacroAssembler::PopStackHandler(Register scratch) {
956 STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0);
957 ExternalReference handler_address =
958 ExternalReference::Create(IsolateAddressId::kHandlerAddress, isolate());
959 pop(ExternalReferenceAsOperand(handler_address, scratch));
960 add(esp, Immediate(StackHandlerConstants::kSize - kPointerSize));
964 void MacroAssembler::CallStub(CodeStub* stub) {
965 DCHECK(AllowThisStubCall(stub));
966 Call(stub->GetCode(), RelocInfo::CODE_TARGET);
969 void MacroAssembler::TailCallStub(CodeStub* stub) {
970 Jump(stub->GetCode(), RelocInfo::CODE_TARGET);
973 bool TurboAssembler::AllowThisStubCall(CodeStub* stub) {
974 return has_frame() || !stub->SometimesSetsUpAFrame();
977 void MacroAssembler::CallRuntime(
const Runtime::Function* f,
979 SaveFPRegsMode save_doubles) {
983 CHECK(f->nargs < 0 || f->nargs == num_arguments);
989 Move(kRuntimeCallArgCountRegister, Immediate(num_arguments));
990 Move(kRuntimeCallFunctionRegister, Immediate(ExternalReference::Create(f)));
992 CodeFactory::CEntry(isolate(), f->result_size, save_doubles);
993 Call(code, RelocInfo::CODE_TARGET);
996 void TurboAssembler::CallRuntimeWithCEntry(Runtime::FunctionId fid,
998 const Runtime::Function* f = Runtime::FunctionForId(fid);
1003 Move(kRuntimeCallArgCountRegister, Immediate(f->nargs));
1004 Move(kRuntimeCallFunctionRegister, Immediate(ExternalReference::Create(f)));
1005 DCHECK(!AreAliased(centry, kRuntimeCallArgCountRegister,
1006 kRuntimeCallFunctionRegister));
1007 add(centry, Immediate(Code::kHeaderSize - kHeapObjectTag));
1011 void MacroAssembler::TailCallRuntime(Runtime::FunctionId fid) {
1022 const Runtime::Function*
function = Runtime::FunctionForId(fid);
1023 DCHECK_EQ(1, function->result_size);
1024 if (function->nargs >= 0) {
1029 Move(kRuntimeCallArgCountRegister, Immediate(function->nargs));
1031 JumpToExternalReference(ExternalReference::Create(fid));
1034 void MacroAssembler::JumpToExternalReference(
const ExternalReference& ext,
1035 bool builtin_exit_frame) {
1037 Move(kRuntimeCallFunctionRegister, Immediate(ext));
1038 Handle<Code> code = CodeFactory::CEntry(isolate(), 1, kDontSaveFPRegs,
1039 kArgvOnStack, builtin_exit_frame);
1040 Jump(code, RelocInfo::CODE_TARGET);
1043 void MacroAssembler::JumpToInstructionStream(Address entry) {
1044 jmp(entry, RelocInfo::OFF_HEAP_TARGET);
1047 void TurboAssembler::PrepareForTailCall(
1048 const ParameterCount& callee_args_count, Register caller_args_count_reg,
1049 Register scratch0, Register scratch1,
1050 int number_of_temp_values_after_return_address) {
1052 if (callee_args_count.is_reg()) {
1053 DCHECK(!AreAliased(callee_args_count.reg(), caller_args_count_reg, scratch0,
1056 DCHECK(!AreAliased(caller_args_count_reg, scratch0, scratch1));
1062 Register new_sp_reg = scratch0;
1063 if (callee_args_count.is_reg()) {
1064 sub(caller_args_count_reg, callee_args_count.reg());
1066 Operand(ebp, caller_args_count_reg, times_pointer_size,
1067 StandardFrameConstants::kCallerPCOffset -
1068 number_of_temp_values_after_return_address * kPointerSize));
1070 lea(new_sp_reg, Operand(ebp, caller_args_count_reg, times_pointer_size,
1071 StandardFrameConstants::kCallerPCOffset -
1072 (callee_args_count.immediate() +
1073 number_of_temp_values_after_return_address) *
1077 if (FLAG_debug_code) {
1078 cmp(esp, new_sp_reg);
1079 Check(below, AbortReason::kStackAccessBelowStackPointer);
1085 Register tmp_reg = scratch1;
1086 mov(tmp_reg, Operand(ebp, StandardFrameConstants::kCallerPCOffset));
1087 mov(Operand(esp, number_of_temp_values_after_return_address * kPointerSize),
1092 mov(ebp, Operand(ebp, StandardFrameConstants::kCallerFPOffset));
1095 Register count_reg = caller_args_count_reg;
1096 if (callee_args_count.is_reg()) {
1097 lea(count_reg, Operand(callee_args_count.reg(),
1098 2 + number_of_temp_values_after_return_address));
1100 mov(count_reg, Immediate(callee_args_count.immediate() + 2 +
1101 number_of_temp_values_after_return_address));
1108 jmp(&entry, Label::kNear);
1111 mov(tmp_reg, Operand(esp, count_reg, times_pointer_size, 0));
1112 mov(Operand(new_sp_reg, count_reg, times_pointer_size, 0), tmp_reg);
1114 cmp(count_reg, Immediate(0));
1115 j(not_equal, &loop, Label::kNear);
1118 mov(esp, new_sp_reg);
1121 void MacroAssembler::InvokePrologue(
const ParameterCount& expected,
1122 const ParameterCount& actual, Label* done,
1123 bool* definitely_mismatches,
1125 Label::Distance done_near) {
1126 DCHECK_IMPLIES(expected.is_reg(), expected.reg() == ecx);
1127 DCHECK_IMPLIES(actual.is_reg(), actual.reg() == eax);
1129 bool definitely_matches =
false;
1130 *definitely_mismatches =
false;
1132 if (expected.is_immediate()) {
1133 DCHECK(actual.is_immediate());
1134 mov(eax, actual.immediate());
1135 if (expected.immediate() == actual.immediate()) {
1136 definitely_matches =
true;
1138 const int sentinel = SharedFunctionInfo::kDontAdaptArgumentsSentinel;
1139 if (expected.immediate() == sentinel) {
1144 definitely_matches =
true;
1146 *definitely_mismatches =
true;
1147 mov(ecx, expected.immediate());
1151 if (actual.is_immediate()) {
1155 mov(eax, actual.immediate());
1156 cmp(expected.reg(), actual.immediate());
1158 DCHECK(expected.reg() == ecx);
1159 }
else if (expected.reg() != actual.reg()) {
1162 cmp(expected.reg(), actual.reg());
1164 DCHECK(actual.reg() == eax);
1165 DCHECK(expected.reg() == ecx);
1167 definitely_matches =
true;
1168 Move(eax, actual.reg());
1172 if (!definitely_matches) {
1173 Handle<Code> adaptor = BUILTIN_CODE(isolate(), ArgumentsAdaptorTrampoline);
1174 if (flag == CALL_FUNCTION) {
1175 Call(adaptor, RelocInfo::CODE_TARGET);
1176 if (!*definitely_mismatches) {
1177 jmp(done, done_near);
1180 Jump(adaptor, RelocInfo::CODE_TARGET);
1186 void MacroAssembler::CheckDebugHook(Register fun, Register new_target,
1187 const ParameterCount& expected,
1188 const ParameterCount& actual) {
1191 ExternalReference debug_hook_active =
1192 ExternalReference::debug_hook_on_function_call_address(isolate());
1194 cmpb(ExternalReferenceAsOperand(debug_hook_active, eax), Immediate(0));
1196 j(equal, &skip_hook);
1199 FrameScope frame(
this,
1200 has_frame() ? StackFrame::NONE : StackFrame::INTERNAL);
1201 if (expected.is_reg()) {
1202 SmiTag(expected.reg());
1203 Push(expected.reg());
1205 if (actual.is_reg()) {
1206 SmiTag(actual.reg());
1208 SmiUntag(actual.reg());
1210 if (new_target.is_valid()) {
1215 Operand receiver_op =
1217 ? Operand(ebp, actual.reg(), times_pointer_size, kPointerSize * 2)
1218 : Operand(ebp, actual.immediate() * times_pointer_size +
1221 CallRuntime(Runtime::kDebugOnFunctionCall);
1223 if (new_target.is_valid()) {
1226 if (actual.is_reg()) {
1228 SmiUntag(actual.reg());
1230 if (expected.is_reg()) {
1231 Pop(expected.reg());
1232 SmiUntag(expected.reg());
1238 void MacroAssembler::InvokeFunctionCode(Register
function, Register new_target,
1239 const ParameterCount& expected,
1240 const ParameterCount& actual,
1243 DCHECK(flag == JUMP_FUNCTION || has_frame());
1244 DCHECK(
function == edi);
1245 DCHECK_IMPLIES(new_target.is_valid(), new_target == edx);
1246 DCHECK_IMPLIES(expected.is_reg(), expected.reg() == ecx);
1247 DCHECK_IMPLIES(actual.is_reg(), actual.reg() == eax);
1250 CheckDebugHook(
function, new_target, expected, actual);
1253 if (!new_target.is_valid()) {
1254 Move(edx, isolate()->factory()->undefined_value());
1258 bool definitely_mismatches =
false;
1259 InvokePrologue(expected, actual, &done, &definitely_mismatches, flag,
1261 if (!definitely_mismatches) {
1265 static_assert(kJavaScriptCallCodeStartRegister == ecx,
"ABI mismatch");
1266 mov(ecx, FieldOperand(
function, JSFunction::kCodeOffset));
1267 add(ecx, Immediate(Code::kHeaderSize - kHeapObjectTag));
1268 if (flag == CALL_FUNCTION) {
1271 DCHECK(flag == JUMP_FUNCTION);
1278 void MacroAssembler::InvokeFunction(Register fun, Register new_target,
1279 const ParameterCount& actual,
1282 DCHECK(flag == JUMP_FUNCTION || has_frame());
1285 mov(ecx, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
1286 mov(esi, FieldOperand(edi, JSFunction::kContextOffset));
1288 FieldOperand(ecx, SharedFunctionInfo::kFormalParameterCountOffset));
1290 ParameterCount expected(ecx);
1291 InvokeFunctionCode(edi, new_target, expected, actual, flag);
1294 void MacroAssembler::LoadGlobalProxy(Register dst) {
1295 mov(dst, NativeContextOperand());
1296 mov(dst, ContextOperand(dst, Context::GLOBAL_PROXY_INDEX));
1299 void MacroAssembler::LoadGlobalFunction(
int index, Register
function) {
1301 mov(
function, NativeContextOperand());
1303 mov(
function, ContextOperand(
function, index));
1306 int MacroAssembler::SafepointRegisterStackIndex(
int reg_code) {
1310 DCHECK(reg_code >= 0 && reg_code < kNumSafepointRegisters);
1311 return kNumSafepointRegisters - reg_code - 1;
1314 void TurboAssembler::Ret() { ret(0); }
1316 void TurboAssembler::Ret(
int bytes_dropped, Register scratch) {
1317 if (is_uint16(bytes_dropped)) {
1321 add(esp, Immediate(bytes_dropped));
1327 void TurboAssembler::Push(Immediate value) {
1328 if (root_array_available() && options().isolate_independent_code) {
1329 if (value.is_embedded_object()) {
1330 Push(HeapObjectAsOperand(value.embedded_object()));
1332 }
else if (value.is_external_reference()) {
1333 Push(ExternalReferenceAddressAsOperand(value.external_reference()));
1340 void MacroAssembler::Drop(
int stack_elements) {
1341 if (stack_elements > 0) {
1342 add(esp, Immediate(stack_elements * kPointerSize));
// Register-to-register move. Body elided in this extract — presumably it
// skips the mov when dst == src; verify against the full file.
1346 void TurboAssembler::Move(Register dst, Register src) {
// Load an Immediate into a register, choosing the most compact or
// relocatable sequence for the immediate's flavor.
1352 void TurboAssembler::Move(Register dst,
const Immediate& src) {
// A plain zero (and not a heap-object request) takes a shorter encoding
// (that branch's body is elided here).
1353 if (!src.is_heap_object_request() && src.is_zero()) {
1355 }
// External references may need root-relative materialization.
else if (src.is_external_reference()) {
1356 LoadAddress(dst, src.external_reference());
// Store an Immediate to memory. Isolate-independent code must not embed
// object pointers, external addresses, or heap-object requests directly.
1362 void TurboAssembler::Move(Operand dst,
const Immediate& src) {
1365 if (root_array_available() && options().isolate_independent_code) {
1366 if (src.is_embedded_object() || src.is_external_reference() ||
1367 src.is_heap_object_request()) {
// Regular path: embedded objects can be stored directly.
1374 if (src.is_embedded_object()) {
1375 mov(dst, src.embedded_object());
// Load a HeapObject handle. Isolate-independent code looks the constant
// up through the root array rather than embedding the handle.
1381 void TurboAssembler::Move(Register dst, Handle<HeapObject> src) {
1382 if (root_array_available() && options().isolate_independent_code) {
1383 IndirectLoadConstant(dst, src);
// Load a 32-bit constant into an XMM register without a memory constant.
// A contiguous run of one-bits is synthesized from all-ones via shifts;
// anything else is staged through eax and movd.
1389 void TurboAssembler::Move(XMMRegister dst,
uint32_t src) {
// cnt/nlz/ntz describe the constant's bit layout.
1393 unsigned cnt = base::bits::CountPopulation(src);
1394 unsigned nlz = base::bits::CountLeadingZeros32(src);
1395 unsigned ntz = base::bits::CountTrailingZeros32(src);
// nlz + cnt + ntz == 32 means the set bits form one contiguous run.
1396 if (nlz + cnt + ntz == 32) {
1399 psrld(dst, 32 - cnt);
1401 pslld(dst, 32 - cnt);
1402 if (nlz != 0) psrld(dst, nlz);
// General case: materialize in eax, transfer with movd.
1406 mov(eax, Immediate(src));
1407 movd(dst, Operand(eax));
// 64-bit variant: contiguous-run trick with 64-bit shifts, a special case
// for a zero low half (elided here), a two-insert SSE4.1 path, and a
// stack-staged movsd fallback.
1413 void TurboAssembler::Move(XMMRegister dst, uint64_t src) {
1419 unsigned cnt = base::bits::CountPopulation(src);
1420 unsigned nlz = base::bits::CountLeadingZeros64(src);
1421 unsigned ntz = base::bits::CountTrailingZeros64(src);
1422 if (nlz + cnt + ntz == 64) {
1425 psrlq(dst, 64 - cnt);
1427 psllq(dst, 64 - cnt);
1428 if (nlz != 0) psrlq(dst, nlz);
1430 }
else if (lower == 0) {
1433 }
else if (CpuFeatures::IsSupported(SSE4_1)) {
1434 CpuFeatureScope scope(
this, SSE4_1);
// Low 32 bits go in via movd; eax is reloaded only when the high half
// differs, and lane 1 is written by pinsrd below either way.
1436 Move(eax, Immediate(lower));
1437 movd(dst, Operand(eax));
1438 if (upper != lower) {
1439 Move(eax, Immediate(upper));
1441 pinsrd(dst, Operand(eax), 1);
// Fallback: build the 64-bit pattern on the stack and load it whole.
1444 push(Immediate(upper));
1445 push(Immediate(lower));
1446 movsd(dst, Operand(esp, 0));
1447 add(esp, Immediate(kDoubleSize));
1452 void TurboAssembler::Pshufhw(XMMRegister dst, Operand src, uint8_t shuffle) {
1453 if (CpuFeatures::IsSupported(AVX)) {
1454 CpuFeatureScope scope(
this, AVX);
1455 vpshufhw(dst, src, shuffle);
1457 pshufhw(dst, src, shuffle);
1461 void TurboAssembler::Pshuflw(XMMRegister dst, Operand src, uint8_t shuffle) {
1462 if (CpuFeatures::IsSupported(AVX)) {
1463 CpuFeatureScope scope(
this, AVX);
1464 vpshuflw(dst, src, shuffle);
1466 pshuflw(dst, src, shuffle);
1470 void TurboAssembler::Pshufd(XMMRegister dst, Operand src, uint8_t shuffle) {
1471 if (CpuFeatures::IsSupported(AVX)) {
1472 CpuFeatureScope scope(
this, AVX);
1473 vpshufd(dst, src, shuffle);
1475 pshufd(dst, src, shuffle);
// Arithmetic right-shift of 16-bit lanes by |shift|. The AVX path is
// shown; the non-AVX fallback is elided in this extract.
1479 void TurboAssembler::Psraw(XMMRegister dst, uint8_t shift) {
1480 if (CpuFeatures::IsSupported(AVX)) {
1481 CpuFeatureScope scope(
this, AVX);
1482 vpsraw(dst, dst, shift);
// Logical right-shift of 16-bit lanes by |shift|; non-AVX fallback
// likewise elided in this extract.
1488 void TurboAssembler::Psrlw(XMMRegister dst, uint8_t shift) {
1489 if (CpuFeatures::IsSupported(AVX)) {
1490 CpuFeatureScope scope(
this, AVX);
1491 vpsrlw(dst, dst, shift);
// PSIGNB: negate/zero/keep each byte lane of dst according to the sign of
// the corresponding lane of src. Needs AVX or SSSE3; aborts otherwise
// (the SSSE3-path instruction itself is elided in this extract).
// NOTE(review): the abort text says "SSE3" although the feature checked
// is SSSE3 — consider aligning the message with the check.
1497 void TurboAssembler::Psignb(XMMRegister dst, Operand src) {
1498 if (CpuFeatures::IsSupported(AVX)) {
1499 CpuFeatureScope scope(
this, AVX);
1500 vpsignb(dst, dst, src);
1503 if (CpuFeatures::IsSupported(SSSE3)) {
1504 CpuFeatureScope sse_scope(
this, SSSE3);
1508 FATAL(
"no AVX or SSE3 support");
// PSIGNW: same dispatch pattern, 16-bit lanes.
1511 void TurboAssembler::Psignw(XMMRegister dst, Operand src) {
1512 if (CpuFeatures::IsSupported(AVX)) {
1513 CpuFeatureScope scope(
this, AVX);
1514 vpsignw(dst, dst, src);
1517 if (CpuFeatures::IsSupported(SSSE3)) {
1518 CpuFeatureScope sse_scope(
this, SSSE3);
1522 FATAL(
"no AVX or SSE3 support");
// PSIGND: same dispatch pattern, 32-bit lanes.
1525 void TurboAssembler::Psignd(XMMRegister dst, Operand src) {
1526 if (CpuFeatures::IsSupported(AVX)) {
1527 CpuFeatureScope scope(
this, AVX);
1528 vpsignd(dst, dst, src);
1531 if (CpuFeatures::IsSupported(SSSE3)) {
1532 CpuFeatureScope sse_scope(
this, SSSE3);
1536 FATAL(
"no AVX or SSE3 support");
// PSHUFB: byte-wise shuffle of dst by the control bytes in src; same
// AVX-or-SSSE3 dispatch.
1539 void TurboAssembler::Pshufb(XMMRegister dst, Operand src) {
1540 if (CpuFeatures::IsSupported(AVX)) {
1541 CpuFeatureScope scope(
this, AVX);
1542 vpshufb(dst, dst, src);
1545 if (CpuFeatures::IsSupported(SSSE3)) {
1546 CpuFeatureScope sse_scope(
this, SSSE3);
1550 FATAL(
"no AVX or SSE3 support");
1553 void TurboAssembler::Pblendw(XMMRegister dst, Operand src, uint8_t imm8) {
1554 if (CpuFeatures::IsSupported(AVX)) {
1555 CpuFeatureScope scope(
this, AVX);
1556 vpblendw(dst, dst, src, imm8);
1559 if (CpuFeatures::IsSupported(SSE4_1)) {
1560 CpuFeatureScope sse_scope(
this, SSE4_1);
1561 pblendw(dst, src, imm8);
1564 FATAL(
"no AVX or SSE4.1 support");
1567 void TurboAssembler::Palignr(XMMRegister dst, Operand src, uint8_t imm8) {
1568 if (CpuFeatures::IsSupported(AVX)) {
1569 CpuFeatureScope scope(
this, AVX);
1570 vpalignr(dst, dst, src, imm8);
1573 if (CpuFeatures::IsSupported(SSSE3)) {
1574 CpuFeatureScope sse_scope(
this, SSSE3);
1575 palignr(dst, src, imm8);
1578 FATAL(
"no AVX or SSE3 support");
1581 void TurboAssembler::Pextrb(Register dst, XMMRegister src, uint8_t imm8) {
1582 if (CpuFeatures::IsSupported(AVX)) {
1583 CpuFeatureScope scope(
this, AVX);
1584 vpextrb(dst, src, imm8);
1587 if (CpuFeatures::IsSupported(SSE4_1)) {
1588 CpuFeatureScope sse_scope(
this, SSE4_1);
1589 pextrb(dst, src, imm8);
1592 FATAL(
"no AVX or SSE4.1 support");
1595 void TurboAssembler::Pextrw(Register dst, XMMRegister src, uint8_t imm8) {
1596 if (CpuFeatures::IsSupported(AVX)) {
1597 CpuFeatureScope scope(
this, AVX);
1598 vpextrw(dst, src, imm8);
1601 if (CpuFeatures::IsSupported(SSE4_1)) {
1602 CpuFeatureScope sse_scope(
this, SSE4_1);
1603 pextrw(dst, src, imm8);
1606 FATAL(
"no AVX or SSE4.1 support");
// Extract 32-bit lane |imm8| of |src| into |dst|. Uses AVX or SSE4.1 when
// available; otherwise spills the XMM register to the stack and reads the
// requested word back (tail below; some connecting lines are elided in
// this extract).
1609 void TurboAssembler::Pextrd(Register dst, XMMRegister src, uint8_t imm8) {
1614 if (CpuFeatures::IsSupported(AVX)) {
1615 CpuFeatureScope scope(
this, AVX);
1616 vpextrd(dst, src, imm8);
1619 if (CpuFeatures::IsSupported(SSE4_1)) {
1620 CpuFeatureScope sse_scope(
this, SSE4_1);
1621 pextrd(dst, src, imm8);
// Stack fallback: spill the low 64 bits, read the selected 32-bit word.
1628 sub(esp, Immediate(kDoubleSize));
1629 movsd(Operand(esp, 0), src);
1630 mov(dst, Operand(esp, imm8 * kUInt32Size));
1631 add(esp, Immediate(kDoubleSize));
// Insert the 32-bit value |src| into lane |imm8| of |dst|; AVX/SSE4.1
// paths first, then a stack-staged read-modify-write fallback.
1634 void TurboAssembler::Pinsrd(XMMRegister dst, Operand src, uint8_t imm8) {
1635 if (CpuFeatures::IsSupported(AVX)) {
1636 CpuFeatureScope scope(
this, AVX);
1637 vpinsrd(dst, dst, src, imm8);
1640 if (CpuFeatures::IsSupported(SSE4_1)) {
1641 CpuFeatureScope sse_scope(
this, SSE4_1);
1642 pinsrd(dst, src, imm8);
// Fallback: spill dst, overwrite the selected word, reload.
1649 sub(esp, Immediate(kDoubleSize));
1651 movsd(Operand(esp, 0), dst);
// A register source can be stored directly; otherwise the value is moved
// through |dst| with movss (surrounding lines elided in this extract).
1653 if (src.is_reg_only()) {
1654 mov(Operand(esp, imm8 * kUInt32Size), src.reg());
1657 movss(Operand(esp, imm8 * kUInt32Size), dst);
1660 movsd(dst, Operand(esp, 0));
1661 add(esp, Immediate(kDoubleSize));
// Count leading zeros. Uses the LZCNT instruction when available; the
// fallback (partially elided here) emulates it with bsr: for a zero input
// dst is forced to 63 so the final xor 31 yields 32, and for nonzero
// inputs xor 31 converts the bit index into a leading-zero count.
// NOTE(review): "¬_zero_src" below is mojibake for "&not_zero_src"
// (the "&not" prefix was decoded as the HTML entity) — restore before
// compiling.
1664 void TurboAssembler::Lzcnt(Register dst, Operand src) {
1665 if (CpuFeatures::IsSupported(LZCNT)) {
1666 CpuFeatureScope scope(
this, LZCNT);
1672 j(not_zero, ¬_zero_src, Label::kNear);
1673 Move(dst, Immediate(63));
1674 bind(¬_zero_src);
1675 xor_(dst, Immediate(31));
// Count trailing zeros. Uses TZCNT (BMI1) when available; the bsf-based
// fallback forces dst to 32 for a zero input, matching tzcnt semantics.
// Same mojibake note as above applies to "¬_zero_src".
1678 void TurboAssembler::Tzcnt(Register dst, Operand src) {
1679 if (CpuFeatures::IsSupported(BMI1)) {
1680 CpuFeatureScope scope(
this, BMI1);
1686 j(not_zero, ¬_zero_src, Label::kNear);
1687 Move(dst, Immediate(32));
1688 bind(¬_zero_src);
// Population count. Hardware POPCNT only — aborts when the feature is
// missing (the popcnt emission line is elided in this extract).
1691 void TurboAssembler::Popcnt(Register dst, Operand src) {
1692 if (CpuFeatures::IsSupported(POPCNT)) {
1693 CpuFeatureScope scope(
this, POPCNT);
1697 FATAL(
"no POPCNT support");
1700 void MacroAssembler::LoadWeakValue(Register in_out, Label* target_if_cleared) {
1701 cmp(in_out, Immediate(kClearedWeakHeapObjectLower32));
1702 j(equal, target_if_cleared);
1704 and_(in_out, Immediate(~kWeakHeapObjectMask));
// Bump a StatsCounter by |value| (> 0). Only emits code when native code
// counters are enabled and the counter is live; the counter cell is
// addressed through an external reference (intervening lines elided in
// this extract).
1707 void MacroAssembler::IncrementCounter(StatsCounter* counter,
int value,
1709 DCHECK_GT(value, 0);
1710 if (FLAG_native_code_counters && counter->Enabled()) {
1712 ExternalReferenceAsOperand(ExternalReference::Create(counter), scratch);
1716 add(operand, Immediate(value));
// Mirror of IncrementCounter: subtracts |value| (> 0) from the counter
// cell under the same enablement conditions.
1721 void MacroAssembler::DecrementCounter(StatsCounter* counter,
int value,
1723 DCHECK_GT(value, 0);
1724 if (FLAG_native_code_counters && counter->Enabled()) {
1726 ExternalReferenceAsOperand(ExternalReference::Create(counter), scratch);
1730 sub(operand, Immediate(value));
1735 void TurboAssembler::Assert(Condition cc, AbortReason reason) {
1736 if (emit_debug_code()) Check(cc, reason);
1739 void TurboAssembler::AssertUnreachable(AbortReason reason) {
1740 if (emit_debug_code()) Abort(reason);
// Abort with |reason| unless condition |cc| holds; body elided in this
// extract.
1743 void TurboAssembler::Check(Condition cc, AbortReason reason) {
// Debug helper: verify esp is aligned to the platform's activation frame
// alignment. The failure path (elided here) is expected to trap; only the
// test-and-skip is visible.
1751 void TurboAssembler::CheckStackAlignment() {
1752 int frame_alignment = base::OS::ActivationFrameAlignment();
1753 int frame_alignment_mask = frame_alignment - 1;
// Only check when the required alignment exceeds the natural pointer
// alignment.
1754 if (frame_alignment > kPointerSize) {
1755 DCHECK(base::bits::IsPowerOfTwo(frame_alignment));
1756 Label alignment_as_expected;
1757 test(esp, Immediate(frame_alignment_mask));
1758 j(zero, &alignment_as_expected);
1761 bind(&alignment_as_expected);
// Emit an abort sequence for |reason|. Several paths exist: a trap, a
// direct C call to abort_with_reason (for hard aborts, where no frame can
// be assumed), and the Abort builtin (with or without a frame scope).
// Multiple lines are elided in this extract.
1765 void TurboAssembler::Abort(AbortReason reason) {
// The human-readable reason is recorded as an assembler comment.
1767 const char* msg = GetAbortReason(reason);
1768 RecordComment(
"Abort message: ");
// Cheap debugging aid: trap instead of calling out.
1773 if (trap_on_abort()) {
// Hard abort: call the C abort helper directly without assuming a frame.
1778 if (should_abort_hard()) {
1780 FrameScope assume_frame(
this, StackFrame::NONE);
1781 PrepareCallCFunction(1, eax);
1782 mov(Operand(esp, 0), Immediate(static_cast<int>(reason)));
1783 CallCFunction(ExternalReference::abort_with_reason(), 1);
// Regular abort: pass the reason as a Smi in edx to the Abort builtin.
1787 Move(edx, Smi::FromInt(static_cast<int>(reason)));
// Without a frame, a temporary NONE frame scope is set up for the call.
1793 FrameScope scope(
this, StackFrame::NONE);
1794 Call(BUILTIN_CODE(isolate(), Abort), RelocInfo::CODE_TARGET);
1796 Call(BUILTIN_CODE(isolate(), Abort), RelocInfo::CODE_TARGET);
// Reserve stack space for |num_arguments| outgoing C arguments. When the
// OS mandates frame alignment, one extra slot is reserved so the old esp
// can be saved (in the last slot) after esp is aligned down.
1803 void TurboAssembler::PrepareCallCFunction(
int num_arguments, Register scratch)
{
1804 int frame_alignment = base::OS::ActivationFrameAlignment();
1805 if (frame_alignment != 0) {
// Extra slot keeps the pre-alignment esp (stored via |scratch| below).
1809 sub(esp, Immediate((num_arguments + 1) * kPointerSize));
1810 DCHECK(base::bits::IsPowerOfTwo(frame_alignment));
1811 and_(esp, -frame_alignment);
1812 mov(Operand(esp, num_arguments * kPointerSize), scratch);
// No alignment requirement: plain reservation.
1814 sub(esp, Immediate(num_arguments * kPointerSize));
// Call a C function identified by external reference: materialize the
// target in eax and dispatch to the register overload.
1818 void TurboAssembler::CallCFunction(ExternalReference
function,
1819 int num_arguments)
{
1821 Move(eax, Immediate(
function));
1822 CallCFunction(eax, num_arguments);
// Call a C function whose address is in |function|. Requires a frame;
// optionally verifies stack alignment in debug code, then unwinds the
// space reserved by PrepareCallCFunction (restoring the saved esp when
// alignment was applied). The call emission itself is elided here.
1825 void TurboAssembler::CallCFunction(Register
function,
int num_arguments)
{
1826 DCHECK_LE(num_arguments, kMaxCParameters);
1827 DCHECK(has_frame());
1829 if (emit_debug_code()) {
1830 CheckStackAlignment();
// Aligned frames restore esp from the saved slot; otherwise just pop the
// argument area.
1834 if (base::OS::ActivationFrameAlignment() != 0) {
1835 mov(esp, Operand(esp, num_arguments * kPointerSize));
1837 add(esp, Immediate(num_arguments * kPointerSize));
// Call a Code object. With embedded builtins enabled, isolate-independent
// code routes non-isolate-independent targets indirectly (elided branch),
// and isolate-independent builtins can be called straight into the
// off-heap embedded blob, skipping the on-heap trampoline.
1841 void TurboAssembler::Call(Handle<Code> code_object, RelocInfo::Mode rmode) {
1842 if (FLAG_embedded_builtins) {
1843 if (root_array_available() && options().isolate_independent_code &&
1844 !Builtins::IsIsolateIndependentBuiltin(*code_object)) {
1849 }
else if (options().inline_offheap_trampolines) {
1850 int builtin_index = Builtins::kNoBuiltinId;
1851 if (isolate()->builtins()->IsBuiltinHandle(code_object, &builtin_index) &&
1852 Builtins::IsIsolateIndependent(builtin_index)) {
// Inline the off-heap entry point instead of the trampoline.
1854 RecordCommentForOffHeapTrampoline(builtin_index);
1855 CHECK_NE(builtin_index, Builtins::kNoBuiltinId);
1856 EmbeddedData d = EmbeddedData::FromBlob();
1857 Address entry = d.InstructionStartOfBuiltin(builtin_index);
1858 call(entry, RelocInfo::OFF_HEAP_TARGET);
// Default: ordinary relocated call to the code object.
1863 DCHECK(RelocInfo::IsCodeTarget(rmode));
1864 call(code_object, rmode);
// Tail-jump twin of Call above: identical dispatch, but ends in jmp.
1867 void TurboAssembler::Jump(Handle<Code> code_object, RelocInfo::Mode rmode) {
1868 if (FLAG_embedded_builtins) {
1869 if (root_array_available() && options().isolate_independent_code &&
1870 !Builtins::IsIsolateIndependentBuiltin(*code_object)) {
1875 }
else if (options().inline_offheap_trampolines) {
1876 int builtin_index = Builtins::kNoBuiltinId;
1877 if (isolate()->builtins()->IsBuiltinHandle(code_object, &builtin_index) &&
1878 Builtins::IsIsolateIndependent(builtin_index)) {
1880 RecordCommentForOffHeapTrampoline(builtin_index);
1881 CHECK_NE(builtin_index, Builtins::kNoBuiltinId);
1882 EmbeddedData d = EmbeddedData::FromBlob();
1883 Address entry = d.InstructionStartOfBuiltin(builtin_index);
1884 jmp(entry, RelocInfo::OFF_HEAP_TARGET);
1889 DCHECK(RelocInfo::IsCodeTarget(rmode));
1890 jmp(code_object, rmode);
// Retpoline-protected indirect call through |reg|: the real target is
// planted into the return-address slot (setup_target) so the indirect
// branch never trains the BTB; the speculation-capture loop and some
// connecting lines are elided in this extract.
1893 void TurboAssembler::RetpolineCall(Register reg) {
1894 Label setup_return, setup_target, inner_indirect_branch, capture_spec;
1898 bind(&inner_indirect_branch);
1899 call(&setup_target);
// Speculative execution is trapped here until the ret retires.
1901 bind(&capture_spec);
// Overwrite the just-pushed return address with the real target.
1905 bind(&setup_target);
1906 mov(Operand(esp, 0), reg);
1909 bind(&setup_return);
1910 call(&inner_indirect_branch);
// Same construction for an immediate destination address.
1913 void TurboAssembler::RetpolineCall(Address destination, RelocInfo::Mode rmode) {
1914 Label setup_return, setup_target, inner_indirect_branch, capture_spec;
1918 bind(&inner_indirect_branch);
1919 call(&setup_target);
1921 bind(&capture_spec);
1925 bind(&setup_target);
1926 mov(Operand(esp, 0), destination, rmode);
1929 bind(&setup_return);
1930 call(&inner_indirect_branch);
// Retpoline-protected indirect jump through |reg| (no return path).
1933 void TurboAssembler::RetpolineJump(Register reg) {
1934 Label setup_target, capture_spec;
1936 call(&setup_target);
1938 bind(&capture_spec);
1942 bind(&setup_target);
1943 mov(Operand(esp, 0), reg);
// Test the MemoryChunk header flags of the page containing |object| and
// branch to |condition_met| if the masked test satisfies |cc| (zero or
// not_zero only). The page header is found by masking the object address
// down to the page boundary.
1947 void TurboAssembler::CheckPageFlag(Register
object, Register scratch,
int mask,
1948 Condition cc, Label* condition_met,
1949 Label::Distance condition_met_distance) {
1950 DCHECK(cc == zero || cc == not_zero);
// If scratch aliases the object register, mask in place; otherwise build
// the page address in scratch without clobbering |object|.
1951 if (scratch ==
object) {
1952 and_(scratch, Immediate(~kPageAlignmentMask));
1954 mov(scratch, Immediate(~kPageAlignmentMask));
1955 and_(scratch,
object);
// Masks that fit one byte use the shorter byte-sized test encoding.
1957 if (mask < (1 << kBitsPerByte)) {
1958 test_b(Operand(scratch, MemoryChunk::kFlagsOffset), Immediate(mask));
1960 test(Operand(scratch, MemoryChunk::kFlagsOffset), Immediate(mask));
1962 j(cc, condition_met, condition_met_distance);
// Compute the start address of the current Code object into |dst| by
// taking the current pc (loaded by elided lines in this extract) and
// subtracting the current offset into the code stream.
1965 void TurboAssembler::ComputeCodeStartAddress(Register dst) {
1971 int pc = pc_offset();
1975 sub(dst, Immediate(pc));
1982 #endif // V8_TARGET_ARCH_IA32