7 #include "src/base/bits.h" 8 #include "src/base/division-by-constant.h" 9 #include "src/base/utils/random-number-generator.h" 10 #include "src/bootstrapper.h" 11 #include "src/callable.h" 12 #include "src/code-factory.h" 13 #include "src/code-stubs.h" 14 #include "src/counters.h" 15 #include "src/debug/debug.h" 16 #include "src/external-reference-table.h" 17 #include "src/frames-inl.h" 18 #include "src/globals.h" 19 #include "src/macro-assembler.h" 20 #include "src/objects-inl.h" 21 #include "src/objects/smi.h" 22 #include "src/register-configuration.h" 23 #include "src/snapshot/embedded-data.h" 24 #include "src/snapshot/snapshot.h" 25 #include "src/string-constants.h" 26 #include "src/x64/assembler-x64.h" 31 #include "src/x64/macro-assembler-x64.h" 37 Operand StackArgumentsAccessor::GetArgumentOperand(
int index) {
39 int receiver = (receiver_mode_ == ARGUMENTS_CONTAIN_RECEIVER) ? 1 : 0;
40 int displacement_to_last_argument =
41 base_reg_ == rsp ? kPCOnStackSize : kFPOnStackSize + kPCOnStackSize;
42 displacement_to_last_argument += extra_displacement_to_last_argument_;
43 if (argument_count_reg_ == no_reg) {
46 DCHECK_GT(argument_count_immediate_ + receiver, 0);
49 displacement_to_last_argument +
50 (argument_count_immediate_ + receiver - 1 - index) * kPointerSize);
55 base_reg_, argument_count_reg_, times_pointer_size,
56 displacement_to_last_argument + (receiver - 1 - index) * kPointerSize);
60 StackArgumentsAccessor::StackArgumentsAccessor(
61 Register base_reg,
const ParameterCount& parameter_count,
62 StackArgumentsAccessorReceiverMode receiver_mode,
63 int extra_displacement_to_last_argument)
64 : base_reg_(base_reg),
65 argument_count_reg_(parameter_count.is_reg() ? parameter_count.reg()
67 argument_count_immediate_(
68 parameter_count.is_immediate() ? parameter_count.immediate() : 0),
69 receiver_mode_(receiver_mode),
70 extra_displacement_to_last_argument_(
71 extra_displacement_to_last_argument) {}
73 MacroAssembler::MacroAssembler(Isolate* isolate,
74 const AssemblerOptions& options,
void* buffer,
75 int size, CodeObjectRequired create_code_object)
76 : TurboAssembler(isolate, options, buffer, size, create_code_object) {
77 if (create_code_object == CodeObjectRequired::kYes) {
83 code_object_ = Handle<HeapObject>::New(
84 *isolate->factory()->NewSelfReferenceMarker(), isolate);
89 void MacroAssembler::Load(Register destination, ExternalReference source) {
90 if (root_array_available_ && options().enable_root_array_delta_access) {
91 intptr_t delta = RootRegisterOffsetForExternalReference(isolate(), source);
92 if (is_int32(delta)) {
93 movp(destination, Operand(kRootRegister, static_cast<int32_t>(delta)));
98 if (FLAG_embedded_builtins) {
99 if (root_array_available_ && options().isolate_independent_code) {
100 IndirectLoadExternalReference(kScratchRegister, source);
101 movp(destination, Operand(kScratchRegister, 0));
105 if (destination == rax) {
108 Move(kScratchRegister, source);
109 movp(destination, Operand(kScratchRegister, 0));
114 void MacroAssembler::Store(ExternalReference destination, Register source) {
115 if (root_array_available_ && options().enable_root_array_delta_access) {
117 RootRegisterOffsetForExternalReference(isolate(), destination);
118 if (is_int32(delta)) {
119 movp(Operand(kRootRegister, static_cast<int32_t>(delta)), source);
125 store_rax(destination);
127 Move(kScratchRegister, destination);
128 movp(Operand(kScratchRegister, 0), source);
132 void TurboAssembler::LoadFromConstantsTable(Register destination,
133 int constant_index) {
134 DCHECK(RootsTable::IsImmortalImmovable(RootIndex::kBuiltinsConstantsTable));
135 LoadRoot(destination, RootIndex::kBuiltinsConstantsTable);
137 FieldOperand(destination,
138 FixedArray::kHeaderSize + constant_index * kPointerSize));
141 void TurboAssembler::LoadRootRegisterOffset(Register destination,
143 DCHECK(is_int32(offset));
145 Move(destination, kRootRegister);
147 leap(destination, Operand(kRootRegister, static_cast<int32_t>(offset)));
151 void TurboAssembler::LoadRootRelative(Register destination, int32_t offset) {
152 movp(destination, Operand(kRootRegister, offset));
155 void TurboAssembler::LoadAddress(Register destination,
156 ExternalReference source) {
157 if (root_array_available_ && options().enable_root_array_delta_access) {
158 intptr_t delta = RootRegisterOffsetForExternalReference(isolate(), source);
159 if (is_int32(delta)) {
160 leap(destination, Operand(kRootRegister, static_cast<int32_t>(delta)));
165 if (FLAG_embedded_builtins) {
166 if (root_array_available_ && options().isolate_independent_code) {
167 IndirectLoadExternalReference(destination, source);
171 Move(destination, source);
174 Operand TurboAssembler::ExternalReferenceAsOperand(ExternalReference reference,
176 if (root_array_available_ && options().enable_root_array_delta_access) {
178 RootRegisterOffsetForExternalReference(isolate(), reference);
179 if (is_int32(delta)) {
180 return Operand(kRootRegister, static_cast<int32_t>(delta));
183 if (root_array_available_ && options().isolate_independent_code) {
184 if (IsAddressableThroughRootRegister(isolate(), reference)) {
188 RootRegisterOffsetForExternalReference(isolate(), reference);
189 CHECK(is_int32(offset));
190 return Operand(kRootRegister, static_cast<int32_t>(offset));
193 movp(scratch, Operand(kRootRegister,
194 RootRegisterOffsetForExternalReferenceTableEntry(
195 isolate(), reference)));
196 return Operand(scratch, 0);
199 Move(scratch, reference);
200 return Operand(scratch, 0);
203 void MacroAssembler::PushAddress(ExternalReference source) {
204 LoadAddress(kScratchRegister, source);
205 Push(kScratchRegister);
208 void TurboAssembler::LoadRoot(Register destination, RootIndex index) {
209 DCHECK(root_array_available_);
211 Operand(kRootRegister, RootRegisterOffsetForRootIndex(index)));
214 void MacroAssembler::PushRoot(RootIndex index) {
215 DCHECK(root_array_available_);
216 Push(Operand(kRootRegister, RootRegisterOffsetForRootIndex(index)));
219 void TurboAssembler::CompareRoot(Register with, RootIndex index) {
220 DCHECK(root_array_available_);
221 cmpp(with, Operand(kRootRegister, RootRegisterOffsetForRootIndex(index)));
224 void TurboAssembler::CompareRoot(Operand with, RootIndex index) {
225 DCHECK(root_array_available_);
226 DCHECK(!with.AddressUsesRegister(kScratchRegister));
227 LoadRoot(kScratchRegister, index);
228 cmpp(with, kScratchRegister);
231 void TurboAssembler::DecompressTaggedSigned(Register destination,
232 Operand field_operand,
233 Register scratch_for_debug) {
234 RecordComment(
"[ DecompressTaggedSigned");
235 if (DEBUG_BOOL && scratch_for_debug.is_valid()) {
236 Register expected_value = scratch_for_debug;
237 movq(expected_value, field_operand);
238 movsxlq(destination, expected_value);
240 cmpq(destination, expected_value);
241 j(equal, &check_passed);
242 RecordComment(
"DecompressTaggedSigned failed");
246 movsxlq(destination, field_operand);
251 void TurboAssembler::DecompressTaggedPointer(Register destination,
252 Operand field_operand,
253 Register scratch_for_debug) {
254 RecordComment(
"[ DecompressTaggedPointer");
255 if (DEBUG_BOOL && scratch_for_debug.is_valid()) {
256 Register expected_value = scratch_for_debug;
257 movq(expected_value, field_operand);
258 movsxlq(destination, expected_value);
259 addq(destination, kRootRegister);
261 cmpq(destination, expected_value);
262 j(equal, &check_passed);
263 RecordComment(
"DecompressTaggedPointer failed");
267 movsxlq(destination, field_operand);
268 addq(destination, kRootRegister);
273 void TurboAssembler::DecompressAnyTagged(Register destination,
274 Operand field_operand,
276 Register scratch_for_debug) {
277 RecordComment(
"[ DecompressAnyTagged");
278 Register expected_value = scratch_for_debug;
279 if (DEBUG_BOOL && expected_value.is_valid()) {
280 movq(expected_value, field_operand);
281 movsxlq(destination, expected_value);
283 movsxlq(destination, field_operand);
287 STATIC_ASSERT((kSmiTagSize == 1) && (kSmiTag < 32));
288 Register masked_root = scratch;
289 movl(masked_root, destination);
290 andl(masked_root, Immediate(kSmiTagMask));
292 andq(masked_root, kRootRegister);
295 addq(destination, masked_root);
296 if (DEBUG_BOOL && expected_value.is_valid()) {
298 cmpq(destination, expected_value);
299 j(equal, &check_passed);
300 RecordComment(
"Decompression failed: Tagged");
307 void MacroAssembler::RecordWriteField(Register
object,
int offset,
308 Register value, Register dst,
309 SaveFPRegsMode save_fp,
310 RememberedSetAction remembered_set_action,
311 SmiCheck smi_check) {
317 if (smi_check == INLINE_SMI_CHECK) {
318 JumpIfSmi(value, &done);
323 DCHECK(IsAligned(offset, kPointerSize));
325 leap(dst, FieldOperand(
object, offset));
326 if (emit_debug_code()) {
328 testb(dst, Immediate(kPointerSize - 1));
329 j(zero, &ok, Label::kNear);
334 RecordWrite(
object, dst, value, save_fp, remembered_set_action,
341 if (emit_debug_code()) {
342 Move(value, kZapValue, RelocInfo::NONE);
343 Move(dst, kZapValue, RelocInfo::NONE);
347 void TurboAssembler::SaveRegisters(RegList registers) {
348 DCHECK_GT(NumRegs(registers), 0);
349 for (
int i = 0;
i < Register::kNumRegisters; ++
i) {
350 if ((registers >>
i) & 1u) {
351 pushq(Register::from_code(
i));
356 void TurboAssembler::RestoreRegisters(RegList registers) {
357 DCHECK_GT(NumRegs(registers), 0);
358 for (
int i = Register::kNumRegisters - 1;
i >= 0; --
i) {
359 if ((registers >>
i) & 1u) {
360 popq(Register::from_code(
i));
365 void TurboAssembler::CallRecordWriteStub(
366 Register
object, Register address,
367 RememberedSetAction remembered_set_action, SaveFPRegsMode fp_mode) {
369 object, address, remembered_set_action, fp_mode,
370 isolate()->builtins()->builtin_handle(Builtins::kRecordWrite),
374 void TurboAssembler::CallRecordWriteStub(
375 Register
object, Register address,
376 RememberedSetAction remembered_set_action, SaveFPRegsMode fp_mode,
377 Address wasm_target) {
378 CallRecordWriteStub(
object, address, remembered_set_action, fp_mode,
379 Handle<Code>::null(), wasm_target);
382 void TurboAssembler::CallRecordWriteStub(
383 Register
object, Register address,
384 RememberedSetAction remembered_set_action, SaveFPRegsMode fp_mode,
385 Handle<Code> code_target, Address wasm_target) {
386 DCHECK_NE(code_target.is_null(), wasm_target == kNullAddress);
388 RecordWriteDescriptor descriptor;
389 RegList registers = descriptor.allocatable_registers();
391 SaveRegisters(registers);
393 Register object_parameter(
394 descriptor.GetRegisterParameter(RecordWriteDescriptor::kObject));
395 Register slot_parameter(
396 descriptor.GetRegisterParameter(RecordWriteDescriptor::kSlot));
397 Register remembered_set_parameter(
398 descriptor.GetRegisterParameter(RecordWriteDescriptor::kRememberedSet));
399 Register fp_mode_parameter(
400 descriptor.GetRegisterParameter(RecordWriteDescriptor::kFPMode));
405 if (slot_parameter !=
object) {
407 Move(slot_parameter, address);
408 Move(object_parameter,
object);
409 }
else if (object_parameter != address) {
413 Move(object_parameter,
object);
414 Move(slot_parameter, address);
418 xchgq(slot_parameter, object_parameter);
421 Smi smi_rsa = Smi::FromEnum(remembered_set_action);
422 Smi smi_fm = Smi::FromEnum(fp_mode);
423 Move(remembered_set_parameter, smi_rsa);
424 if (smi_rsa != smi_fm) {
425 Move(fp_mode_parameter, smi_fm);
427 movq(fp_mode_parameter, remembered_set_parameter);
429 if (code_target.is_null()) {
431 near_call(wasm_target, RelocInfo::WASM_STUB_CALL);
433 Call(code_target, RelocInfo::CODE_TARGET);
436 RestoreRegisters(registers);
439 void MacroAssembler::RecordWrite(Register
object, Register address,
440 Register value, SaveFPRegsMode fp_mode,
441 RememberedSetAction remembered_set_action,
442 SmiCheck smi_check) {
443 DCHECK(
object != value);
444 DCHECK(
object != address);
445 DCHECK(value != address);
446 AssertNotSmi(
object);
448 if (remembered_set_action == OMIT_REMEMBERED_SET &&
449 !FLAG_incremental_marking) {
453 if (emit_debug_code()) {
455 cmpp(value, Operand(address, 0));
456 j(equal, &ok, Label::kNear);
465 if (smi_check == INLINE_SMI_CHECK) {
467 JumpIfSmi(value, &done);
472 MemoryChunk::kPointersToHereAreInterestingMask, zero, &done,
475 CheckPageFlag(
object,
477 MemoryChunk::kPointersFromHereAreInterestingMask,
482 CallRecordWriteStub(
object, address, remembered_set_action, fp_mode);
487 isolate()->counters()->write_barriers_static()->Increment();
488 IncrementCounter(isolate()->counters()->write_barriers_dynamic(), 1);
492 if (emit_debug_code()) {
493 Move(address, kZapValue, RelocInfo::NONE);
494 Move(value, kZapValue, RelocInfo::NONE);
498 void TurboAssembler::Assert(Condition cc, AbortReason reason) {
499 if (emit_debug_code()) Check(cc, reason);
502 void TurboAssembler::AssertUnreachable(AbortReason reason) {
503 if (emit_debug_code()) Abort(reason);
506 void TurboAssembler::Check(Condition cc, AbortReason reason) {
508 j(cc, &L, Label::kNear);
514 void TurboAssembler::CheckStackAlignment() {
515 int frame_alignment = base::OS::ActivationFrameAlignment();
516 int frame_alignment_mask = frame_alignment - 1;
517 if (frame_alignment > kPointerSize) {
518 DCHECK(base::bits::IsPowerOfTwo(frame_alignment));
519 Label alignment_as_expected;
520 testp(rsp, Immediate(frame_alignment_mask));
521 j(zero, &alignment_as_expected, Label::kNear);
524 bind(&alignment_as_expected);
528 void TurboAssembler::Abort(AbortReason reason) {
530 const char* msg = GetAbortReason(reason);
531 RecordComment(
"Abort message: ");
536 if (trap_on_abort()) {
541 if (should_abort_hard()) {
543 FrameScope assume_frame(
this, StackFrame::NONE);
544 movl(arg_reg_1, Immediate(static_cast<int>(reason)));
545 PrepareCallCFunction(1);
546 LoadAddress(rax, ExternalReference::abort_with_reason());
551 Move(rdx, Smi::FromInt(static_cast<int>(reason)));
556 FrameScope scope(
this, StackFrame::NONE);
557 Call(BUILTIN_CODE(isolate(), Abort), RelocInfo::CODE_TARGET);
559 Call(BUILTIN_CODE(isolate(), Abort), RelocInfo::CODE_TARGET);
565 void MacroAssembler::CallStub(CodeStub* stub) {
566 DCHECK(AllowThisStubCall(stub));
567 Call(stub->GetCode(), RelocInfo::CODE_TARGET);
571 void MacroAssembler::TailCallStub(CodeStub* stub) {
572 Jump(stub->GetCode(), RelocInfo::CODE_TARGET);
575 bool TurboAssembler::AllowThisStubCall(CodeStub* stub) {
576 return has_frame() || !stub->SometimesSetsUpAFrame();
579 void TurboAssembler::CallRuntimeWithCEntry(Runtime::FunctionId fid,
581 const Runtime::Function* f = Runtime::FunctionForId(fid);
587 LoadAddress(rbx, ExternalReference::Create(f));
588 DCHECK(!AreAliased(centry, rax, rbx));
589 addp(rcx, Immediate(Code::kHeaderSize - kHeapObjectTag));
593 void MacroAssembler::CallRuntime(
const Runtime::Function* f,
595 SaveFPRegsMode save_doubles) {
599 CHECK(f->nargs < 0 || f->nargs == num_arguments);
605 Set(rax, num_arguments);
606 LoadAddress(rbx, ExternalReference::Create(f));
608 CodeFactory::CEntry(isolate(), f->result_size, save_doubles);
609 Call(code, RelocInfo::CODE_TARGET);
612 void MacroAssembler::TailCallRuntime(Runtime::FunctionId fid) {
623 const Runtime::Function*
function = Runtime::FunctionForId(fid);
624 DCHECK_EQ(1, function->result_size);
625 if (function->nargs >= 0) {
626 Set(rax, function->nargs);
628 JumpToExternalReference(ExternalReference::Create(fid));
631 void MacroAssembler::JumpToExternalReference(
const ExternalReference& ext,
632 bool builtin_exit_frame) {
634 LoadAddress(rbx, ext);
635 Handle<Code> code = CodeFactory::CEntry(isolate(), 1, kDontSaveFPRegs,
636 kArgvOnStack, builtin_exit_frame);
637 Jump(code, RelocInfo::CODE_TARGET);
640 static constexpr Register saved_regs[] = {rax, rcx, rdx, rbx, rbp, rsi,
641 rdi, r8, r9, r10, r11};
643 static constexpr
int kNumberOfSavedRegs =
sizeof(saved_regs) /
sizeof(Register);
645 int TurboAssembler::RequiredStackSizeForCallerSaved(SaveFPRegsMode fp_mode,
648 Register exclusion3)
const {
650 for (
int i = 0;
i < kNumberOfSavedRegs;
i++) {
651 Register reg = saved_regs[
i];
652 if (reg != exclusion1 && reg != exclusion2 && reg != exclusion3) {
653 bytes += kPointerSize;
658 if (fp_mode == kSaveFPRegs) {
659 bytes += kDoubleSize * XMMRegister::kNumRegisters;
665 int TurboAssembler::PushCallerSaved(SaveFPRegsMode fp_mode, Register exclusion1,
666 Register exclusion2, Register exclusion3) {
671 for (
int i = 0;
i < kNumberOfSavedRegs;
i++) {
672 Register reg = saved_regs[
i];
673 if (reg != exclusion1 && reg != exclusion2 && reg != exclusion3) {
675 bytes += kPointerSize;
680 if (fp_mode == kSaveFPRegs) {
681 int delta = kDoubleSize * XMMRegister::kNumRegisters;
682 subp(rsp, Immediate(delta));
683 for (
int i = 0;
i < XMMRegister::kNumRegisters;
i++) {
684 XMMRegister reg = XMMRegister::from_code(
i);
685 Movsd(Operand(rsp,
i * kDoubleSize), reg);
693 int TurboAssembler::PopCallerSaved(SaveFPRegsMode fp_mode, Register exclusion1,
694 Register exclusion2, Register exclusion3) {
696 if (fp_mode == kSaveFPRegs) {
697 for (
int i = 0;
i < XMMRegister::kNumRegisters;
i++) {
698 XMMRegister reg = XMMRegister::from_code(
i);
699 Movsd(reg, Operand(rsp,
i * kDoubleSize));
701 int delta = kDoubleSize * XMMRegister::kNumRegisters;
702 addp(rsp, Immediate(kDoubleSize * XMMRegister::kNumRegisters));
706 for (
int i = kNumberOfSavedRegs - 1;
i >= 0;
i--) {
707 Register reg = saved_regs[
i];
708 if (reg != exclusion1 && reg != exclusion2 && reg != exclusion3) {
710 bytes += kPointerSize;
717 void TurboAssembler::Cvtss2sd(XMMRegister dst, XMMRegister src) {
718 if (CpuFeatures::IsSupported(AVX)) {
719 CpuFeatureScope scope(
this, AVX);
720 vcvtss2sd(dst, src, src);
726 void TurboAssembler::Cvtss2sd(XMMRegister dst, Operand src) {
727 if (CpuFeatures::IsSupported(AVX)) {
728 CpuFeatureScope scope(
this, AVX);
729 vcvtss2sd(dst, dst, src);
735 void TurboAssembler::Cvtsd2ss(XMMRegister dst, XMMRegister src) {
736 if (CpuFeatures::IsSupported(AVX)) {
737 CpuFeatureScope scope(
this, AVX);
738 vcvtsd2ss(dst, src, src);
744 void TurboAssembler::Cvtsd2ss(XMMRegister dst, Operand src) {
745 if (CpuFeatures::IsSupported(AVX)) {
746 CpuFeatureScope scope(
this, AVX);
747 vcvtsd2ss(dst, dst, src);
753 void TurboAssembler::Cvtlsi2sd(XMMRegister dst, Register src) {
754 if (CpuFeatures::IsSupported(AVX)) {
755 CpuFeatureScope scope(
this, AVX);
756 vxorpd(dst, dst, dst);
757 vcvtlsi2sd(dst, dst, src);
764 void TurboAssembler::Cvtlsi2sd(XMMRegister dst, Operand src) {
765 if (CpuFeatures::IsSupported(AVX)) {
766 CpuFeatureScope scope(
this, AVX);
767 vxorpd(dst, dst, dst);
768 vcvtlsi2sd(dst, dst, src);
775 void TurboAssembler::Cvtlsi2ss(XMMRegister dst, Register src) {
776 if (CpuFeatures::IsSupported(AVX)) {
777 CpuFeatureScope scope(
this, AVX);
778 vxorps(dst, dst, dst);
779 vcvtlsi2ss(dst, dst, src);
786 void TurboAssembler::Cvtlsi2ss(XMMRegister dst, Operand src) {
787 if (CpuFeatures::IsSupported(AVX)) {
788 CpuFeatureScope scope(
this, AVX);
789 vxorps(dst, dst, dst);
790 vcvtlsi2ss(dst, dst, src);
797 void TurboAssembler::Cvtqsi2ss(XMMRegister dst, Register src) {
798 if (CpuFeatures::IsSupported(AVX)) {
799 CpuFeatureScope scope(
this, AVX);
800 vxorps(dst, dst, dst);
801 vcvtqsi2ss(dst, dst, src);
808 void TurboAssembler::Cvtqsi2ss(XMMRegister dst, Operand src) {
809 if (CpuFeatures::IsSupported(AVX)) {
810 CpuFeatureScope scope(
this, AVX);
811 vxorps(dst, dst, dst);
812 vcvtqsi2ss(dst, dst, src);
819 void TurboAssembler::Cvtqsi2sd(XMMRegister dst, Register src) {
820 if (CpuFeatures::IsSupported(AVX)) {
821 CpuFeatureScope scope(
this, AVX);
822 vxorpd(dst, dst, dst);
823 vcvtqsi2sd(dst, dst, src);
830 void TurboAssembler::Cvtqsi2sd(XMMRegister dst, Operand src) {
831 if (CpuFeatures::IsSupported(AVX)) {
832 CpuFeatureScope scope(
this, AVX);
833 vxorpd(dst, dst, dst);
834 vcvtqsi2sd(dst, dst, src);
841 void TurboAssembler::Cvtlui2ss(XMMRegister dst, Register src) {
843 movl(kScratchRegister, src);
844 Cvtqsi2ss(dst, kScratchRegister);
847 void TurboAssembler::Cvtlui2ss(XMMRegister dst, Operand src) {
849 movl(kScratchRegister, src);
850 Cvtqsi2ss(dst, kScratchRegister);
853 void TurboAssembler::Cvtlui2sd(XMMRegister dst, Register src) {
855 movl(kScratchRegister, src);
856 Cvtqsi2sd(dst, kScratchRegister);
859 void TurboAssembler::Cvtlui2sd(XMMRegister dst, Operand src) {
861 movl(kScratchRegister, src);
862 Cvtqsi2sd(dst, kScratchRegister);
865 void TurboAssembler::Cvtqui2ss(XMMRegister dst, Register src) {
869 j(positive, &done, Label::kNear);
872 if (src != kScratchRegister) movq(kScratchRegister, src);
873 shrq(kScratchRegister, Immediate(1));
876 j(not_carry, &msb_not_set, Label::kNear);
877 orq(kScratchRegister, Immediate(1));
879 Cvtqsi2ss(dst, kScratchRegister);
884 void TurboAssembler::Cvtqui2ss(XMMRegister dst, Operand src) {
885 movq(kScratchRegister, src);
886 Cvtqui2ss(dst, kScratchRegister);
889 void TurboAssembler::Cvtqui2sd(XMMRegister dst, Register src) {
893 j(positive, &done, Label::kNear);
896 if (src != kScratchRegister) movq(kScratchRegister, src);
897 shrq(kScratchRegister, Immediate(1));
900 j(not_carry, &msb_not_set, Label::kNear);
901 orq(kScratchRegister, Immediate(1));
903 Cvtqsi2sd(dst, kScratchRegister);
908 void TurboAssembler::Cvtqui2sd(XMMRegister dst, Operand src) {
909 movq(kScratchRegister, src);
910 Cvtqui2sd(dst, kScratchRegister);
913 void TurboAssembler::Cvttss2si(Register dst, XMMRegister src) {
914 if (CpuFeatures::IsSupported(AVX)) {
915 CpuFeatureScope scope(
this, AVX);
916 vcvttss2si(dst, src);
922 void TurboAssembler::Cvttss2si(Register dst, Operand src) {
923 if (CpuFeatures::IsSupported(AVX)) {
924 CpuFeatureScope scope(
this, AVX);
925 vcvttss2si(dst, src);
931 void TurboAssembler::Cvttsd2si(Register dst, XMMRegister src) {
932 if (CpuFeatures::IsSupported(AVX)) {
933 CpuFeatureScope scope(
this, AVX);
934 vcvttsd2si(dst, src);
940 void TurboAssembler::Cvttsd2si(Register dst, Operand src) {
941 if (CpuFeatures::IsSupported(AVX)) {
942 CpuFeatureScope scope(
this, AVX);
943 vcvttsd2si(dst, src);
949 void TurboAssembler::Cvttss2siq(Register dst, XMMRegister src) {
950 if (CpuFeatures::IsSupported(AVX)) {
951 CpuFeatureScope scope(
this, AVX);
952 vcvttss2siq(dst, src);
954 cvttss2siq(dst, src);
958 void TurboAssembler::Cvttss2siq(Register dst, Operand src) {
959 if (CpuFeatures::IsSupported(AVX)) {
960 CpuFeatureScope scope(
this, AVX);
961 vcvttss2siq(dst, src);
963 cvttss2siq(dst, src);
967 void TurboAssembler::Cvttsd2siq(Register dst, XMMRegister src) {
968 if (CpuFeatures::IsSupported(AVX)) {
969 CpuFeatureScope scope(
this, AVX);
970 vcvttsd2siq(dst, src);
972 cvttsd2siq(dst, src);
976 void TurboAssembler::Cvttsd2siq(Register dst, Operand src) {
977 if (CpuFeatures::IsSupported(AVX)) {
978 CpuFeatureScope scope(
this, AVX);
979 vcvttsd2siq(dst, src);
981 cvttsd2siq(dst, src);
986 template <
typename OperandOrXMMRegister,
bool is_
double>
987 void ConvertFloatToUint64(TurboAssembler* tasm, Register dst,
988 OperandOrXMMRegister src, Label* fail) {
993 tasm->Cvttsd2siq(dst, src);
995 tasm->Cvttss2siq(dst, src);
998 tasm->testq(dst, dst);
999 tasm->j(positive, &success);
1004 tasm->Move(kScratchDoubleReg, -9223372036854775808.0);
1005 tasm->addsd(kScratchDoubleReg, src);
1006 tasm->Cvttsd2siq(dst, kScratchDoubleReg);
1008 tasm->Move(kScratchDoubleReg, -9223372036854775808.0f);
1009 tasm->addss(kScratchDoubleReg, src);
1010 tasm->Cvttss2siq(dst, kScratchDoubleReg);
1012 tasm->testq(dst, dst);
1015 tasm->j(negative, fail ? fail : &success);
1019 tasm->Set(kScratchRegister, 0x8000000000000000);
1020 tasm->orq(dst, kScratchRegister);
1021 tasm->bind(&success);
1025 void TurboAssembler::Cvttsd2uiq(Register dst, Operand src, Label* success) {
1026 ConvertFloatToUint64<Operand, true>(
this, dst, src, success);
1029 void TurboAssembler::Cvttsd2uiq(Register dst, XMMRegister src, Label* success) {
1030 ConvertFloatToUint64<XMMRegister, true>(
this, dst, src, success);
1033 void TurboAssembler::Cvttss2uiq(Register dst, Operand src, Label* success) {
1034 ConvertFloatToUint64<Operand, false>(
this, dst, src, success);
1037 void TurboAssembler::Cvttss2uiq(Register dst, XMMRegister src, Label* success) {
1038 ConvertFloatToUint64<XMMRegister, false>(
this, dst, src, success);
1041 void MacroAssembler::Load(Register dst, Operand src, Representation r) {
1042 DCHECK(!r.IsDouble());
1043 if (r.IsInteger8()) {
1045 }
else if (r.IsUInteger8()) {
1047 }
else if (r.IsInteger16()) {
1049 }
else if (r.IsUInteger16()) {
1051 }
else if (r.IsInteger32()) {
1058 void MacroAssembler::Store(Operand dst, Register src, Representation r) {
1059 DCHECK(!r.IsDouble());
1060 if (r.IsInteger8() || r.IsUInteger8()) {
1062 }
else if (r.IsInteger16() || r.IsUInteger16()) {
1064 }
else if (r.IsInteger32()) {
1067 if (r.IsHeapObject()) {
1069 }
else if (r.IsSmi()) {
1076 void TurboAssembler::Set(Register dst,
int64_t x) {
1079 }
else if (is_uint32(x)) {
1080 movl(dst, Immediate(static_cast<uint32_t>(x)));
1081 }
else if (is_int32(x)) {
1082 movq(dst, Immediate(static_cast<int32_t>(x)));
1088 void TurboAssembler::Set(Operand dst, intptr_t x) {
1089 if (kPointerSize == kInt64Size) {
1091 movp(dst, Immediate(static_cast<int32_t>(x)));
1093 Set(kScratchRegister, x);
1094 movp(dst, kScratchRegister);
1097 movp(dst, Immediate(static_cast<int32_t>(x)));
1105 Register TurboAssembler::GetSmiConstant(Smi source) {
1106 STATIC_ASSERT(kSmiTag == 0);
1107 int value = source->value();
1109 xorl(kScratchRegister, kScratchRegister);
1110 return kScratchRegister;
1112 Move(kScratchRegister, source);
1113 return kScratchRegister;
1116 void TurboAssembler::Move(Register dst, Smi source) {
1117 STATIC_ASSERT(kSmiTag == 0);
1118 int value = source->value();
1122 Move(dst, source.ptr(), RelocInfo::NONE);
1126 void TurboAssembler::Move(Register dst, ExternalReference ext) {
1127 if (FLAG_embedded_builtins) {
1128 if (root_array_available_ && options().isolate_independent_code) {
1129 IndirectLoadExternalReference(dst, ext);
1133 movp(dst, ext.address(), RelocInfo::EXTERNAL_REFERENCE);
1136 void MacroAssembler::SmiTag(Register dst, Register src) {
1137 STATIC_ASSERT(kSmiTag == 0);
1141 DCHECK(SmiValuesAre32Bits() || SmiValuesAre31Bits());
1142 shlp(dst, Immediate(kSmiShift));
1145 void TurboAssembler::SmiUntag(Register dst, Register src) {
1146 STATIC_ASSERT(kSmiTag == 0);
1150 DCHECK(SmiValuesAre32Bits() || SmiValuesAre31Bits());
1151 sarp(dst, Immediate(kSmiShift));
1154 void TurboAssembler::SmiUntag(Register dst, Operand src) {
1155 if (SmiValuesAre32Bits()) {
1156 movl(dst, Operand(src, kSmiShift / kBitsPerByte));
1160 DCHECK(SmiValuesAre31Bits());
1162 sarp(dst, Immediate(kSmiShift));
1166 void MacroAssembler::SmiCompare(Register smi1, Register smi2) {
1172 void MacroAssembler::SmiCompare(Register dst, Smi src) {
1177 void MacroAssembler::Cmp(Register dst, Smi src) {
1178 DCHECK_NE(dst, kScratchRegister);
1179 if (src->value() == 0) {
1182 Register constant_reg = GetSmiConstant(src);
1183 cmpp(dst, constant_reg);
1187 void MacroAssembler::SmiCompare(Register dst, Operand src) {
1193 void MacroAssembler::SmiCompare(Operand dst, Register src) {
1199 void MacroAssembler::SmiCompare(Operand dst, Smi src) {
1201 if (SmiValuesAre32Bits()) {
1202 cmpl(Operand(dst, kSmiShift / kBitsPerByte), Immediate(src->value()));
1204 DCHECK(SmiValuesAre31Bits());
1205 cmpl(dst, Immediate(src));
1209 void MacroAssembler::Cmp(Operand dst, Smi src) {
1211 Register smi_reg = GetSmiConstant(src);
1212 DCHECK(!dst.AddressUsesRegister(smi_reg));
1217 Condition TurboAssembler::CheckSmi(Register src) {
1218 STATIC_ASSERT(kSmiTag == 0);
1219 testb(src, Immediate(kSmiTagMask));
1223 Condition TurboAssembler::CheckSmi(Operand src) {
1224 STATIC_ASSERT(kSmiTag == 0);
1225 testb(src, Immediate(kSmiTagMask));
1229 void TurboAssembler::JumpIfSmi(Register src, Label* on_smi,
1230 Label::Distance near_jump) {
1231 Condition smi = CheckSmi(src);
1232 j(smi, on_smi, near_jump);
1235 void MacroAssembler::JumpIfNotSmi(Register src,
1237 Label::Distance near_jump) {
1238 Condition smi = CheckSmi(src);
1239 j(NegateCondition(smi), on_not_smi, near_jump);
1242 void MacroAssembler::JumpIfNotSmi(Operand src, Label* on_not_smi,
1243 Label::Distance near_jump) {
1244 Condition smi = CheckSmi(src);
1245 j(NegateCondition(smi), on_not_smi, near_jump);
1248 void MacroAssembler::SmiAddConstant(Operand dst, Smi constant) {
1249 if (constant->value() != 0) {
1250 if (SmiValuesAre32Bits()) {
1251 addl(Operand(dst, kSmiShift / kBitsPerByte),
1252 Immediate(constant->value()));
1254 DCHECK(SmiValuesAre31Bits());
1255 if (kPointerSize == kInt64Size) {
1257 movl(kScratchRegister, dst);
1258 addl(kScratchRegister, Immediate(constant));
1259 movsxlq(kScratchRegister, kScratchRegister);
1260 movq(dst, kScratchRegister);
1262 DCHECK_EQ(kSmiShiftSize, 32);
1263 addp(dst, Immediate(constant));
1269 SmiIndex MacroAssembler::SmiToIndex(Register dst,
1272 if (SmiValuesAre32Bits()) {
1273 DCHECK(is_uint6(shift));
1279 if (shift < kSmiShift) {
1280 sarp(dst, Immediate(kSmiShift - shift));
1282 shlp(dst, Immediate(shift - kSmiShift));
1284 return SmiIndex(dst, times_1);
1286 DCHECK(SmiValuesAre31Bits());
1293 if (shift < kSmiShift) {
1294 sarq(dst, Immediate(kSmiShift - shift));
1295 }
else if (shift != kSmiShift) {
1296 if (shift - kSmiShift <= static_cast<int>(times_8)) {
1297 return SmiIndex(dst, static_cast<ScaleFactor>(shift - kSmiShift));
1299 shlq(dst, Immediate(shift - kSmiShift));
1301 return SmiIndex(dst, times_1);
1305 void TurboAssembler::Push(Smi source) {
1306 intptr_t smi =
static_cast<intptr_t
>(source.ptr());
1307 if (is_int32(smi)) {
1308 Push(Immediate(static_cast<int32_t>(smi)));
1311 int first_byte_set = base::bits::CountTrailingZeros64(smi) / 8;
1312 int last_byte_set = (63 - base::bits::CountLeadingZeros64(smi)) / 8;
1313 if (first_byte_set == last_byte_set && kPointerSize == kInt64Size) {
1316 movb(Operand(rsp, first_byte_set),
1317 Immediate(static_cast<int8_t>(smi >> (8 * first_byte_set))));
1320 Register constant = GetSmiConstant(source);
1326 void TurboAssembler::Move(Register dst, Register src) {
1332 void TurboAssembler::MoveNumber(Register dst,
double value) {
1334 if (DoubleToSmiInteger(value, &smi)) {
1335 Move(dst, Smi::FromInt(smi));
1337 movp_heap_number(dst, value);
1341 void TurboAssembler::Move(XMMRegister dst,
uint32_t src) {
1345 unsigned nlz = base::bits::CountLeadingZeros(src);
1346 unsigned ntz = base::bits::CountTrailingZeros(src);
1347 unsigned pop = base::bits::CountPopulation(src);
1349 if (pop + ntz + nlz == 32) {
1351 if (ntz) Pslld(dst, static_cast<byte>(ntz + nlz));
1352 if (nlz) Psrld(dst, static_cast<byte>(nlz));
1354 movl(kScratchRegister, Immediate(src));
1355 Movd(dst, kScratchRegister);
1360 void TurboAssembler::Move(XMMRegister dst, uint64_t src) {
1364 unsigned nlz = base::bits::CountLeadingZeros(src);
1365 unsigned ntz = base::bits::CountTrailingZeros(src);
1366 unsigned pop = base::bits::CountPopulation(src);
1368 if (pop + ntz + nlz == 64) {
1370 if (ntz) Psllq(dst, static_cast<byte>(ntz + nlz));
1371 if (nlz) Psrlq(dst, static_cast<byte>(nlz));
1378 movq(kScratchRegister, src);
1379 Movq(dst, kScratchRegister);
1387 void MacroAssembler::Absps(XMMRegister dst) {
1388 Andps(dst, ExternalReferenceAsOperand(
1389 ExternalReference::address_of_float_abs_constant()));
1392 void MacroAssembler::Negps(XMMRegister dst) {
1393 Xorps(dst, ExternalReferenceAsOperand(
1394 ExternalReference::address_of_float_neg_constant()));
1397 void MacroAssembler::Abspd(XMMRegister dst) {
1398 Andps(dst, ExternalReferenceAsOperand(
1399 ExternalReference::address_of_double_abs_constant()));
1402 void MacroAssembler::Negpd(XMMRegister dst) {
1403 Xorps(dst, ExternalReferenceAsOperand(
1404 ExternalReference::address_of_double_neg_constant()));
1407 void MacroAssembler::Cmp(Register dst, Handle<Object> source) {
1408 AllowDeferredHandleDereference smi_check;
1409 if (source->IsSmi()) {
1410 Cmp(dst, Smi::cast(*source));
1412 Move(kScratchRegister, Handle<HeapObject>::cast(source));
1413 cmpp(dst, kScratchRegister);
1417 void MacroAssembler::Cmp(Operand dst, Handle<Object> source) {
1418 AllowDeferredHandleDereference smi_check;
1419 if (source->IsSmi()) {
1420 Cmp(dst, Smi::cast(*source));
1422 Move(kScratchRegister, Handle<HeapObject>::cast(source));
1423 cmpp(dst, kScratchRegister);
1427 void TurboAssembler::Push(Handle<HeapObject> source) {
1428 Move(kScratchRegister, source);
1429 Push(kScratchRegister);
1432 void TurboAssembler::Move(Register result, Handle<HeapObject>
object,
1433 RelocInfo::Mode rmode) {
1434 if (FLAG_embedded_builtins) {
1435 if (root_array_available_ && options().isolate_independent_code) {
1436 IndirectLoadConstant(result,
object);
1440 movp(result,
object.address(), rmode);
1443 void TurboAssembler::Move(Operand dst, Handle<HeapObject>
object,
1444 RelocInfo::Mode rmode) {
1445 Move(kScratchRegister,
object, rmode);
1446 movp(dst, kScratchRegister);
1449 void TurboAssembler::MoveStringConstant(Register result,
1450 const StringConstantBase*
string,
1451 RelocInfo::Mode rmode) {
1452 movp_string(result,
string);
1455 void MacroAssembler::Drop(
int stack_elements) {
1456 if (stack_elements > 0) {
1457 addp(rsp, Immediate(stack_elements * kPointerSize));
1462 void MacroAssembler::DropUnderReturnAddress(
int stack_elements,
1464 DCHECK_GT(stack_elements, 0);
1465 if (kPointerSize == kInt64Size && stack_elements == 1) {
1466 popq(MemOperand(rsp, 0));
1470 PopReturnAddressTo(scratch);
1471 Drop(stack_elements);
1472 PushReturnAddressFrom(scratch);
1475 void TurboAssembler::Push(Register src) {
1476 if (kPointerSize == kInt64Size) {
1480 DCHECK(src.code() != rbp.code());
1481 leal(rsp, Operand(rsp, -4));
1482 movp(Operand(rsp, 0), src);
1486 void TurboAssembler::Push(Operand src) {
1487 if (kPointerSize == kInt64Size) {
1490 movp(kScratchRegister, src);
1491 leal(rsp, Operand(rsp, -4));
1492 movp(Operand(rsp, 0), kScratchRegister);
1496 void MacroAssembler::PushQuad(Operand src) {
1497 if (kPointerSize == kInt64Size) {
1500 movp(kScratchRegister, src);
1501 pushq(kScratchRegister);
1505 void TurboAssembler::Push(Immediate value) {
1506 if (kPointerSize == kInt64Size) {
1509 leal(rsp, Operand(rsp, -4));
1510 movp(Operand(rsp, 0), value);
1515 void MacroAssembler::PushImm32(int32_t imm32) {
1516 if (kPointerSize == kInt64Size) {
1519 leal(rsp, Operand(rsp, -4));
1520 movp(Operand(rsp, 0), Immediate(imm32));
1525 void MacroAssembler::Pop(Register dst) {
1526 if (kPointerSize == kInt64Size) {
1530 DCHECK(dst.code() != rbp.code());
1531 movp(dst, Operand(rsp, 0));
1532 leal(rsp, Operand(rsp, 4));
1536 void MacroAssembler::Pop(Operand dst) {
1537 if (kPointerSize == kInt64Size) {
1540 Register scratch = dst.AddressUsesRegister(kScratchRegister)
1541 ? kRootRegister : kScratchRegister;
1542 movp(scratch, Operand(rsp, 0));
1544 leal(rsp, Operand(rsp, 4));
1545 if (scratch == kRootRegister) {
1547 InitializeRootRegister();
1552 void MacroAssembler::PopQuad(Operand dst) {
1553 if (kPointerSize == kInt64Size) {
1556 popq(kScratchRegister);
1557 movp(dst, kScratchRegister);
1561 void TurboAssembler::Jump(ExternalReference ext) {
1562 LoadAddress(kScratchRegister, ext);
1563 jmp(kScratchRegister);
1566 void TurboAssembler::Jump(Operand op) {
1567 if (kPointerSize == kInt64Size) {
1570 movp(kScratchRegister, op);
1571 jmp(kScratchRegister);
1575 void TurboAssembler::Jump(Address destination, RelocInfo::Mode rmode) {
1576 Move(kScratchRegister, destination, rmode);
1577 jmp(kScratchRegister);
// Conditional tail-jump to a Code object. Prefers (1) an indirect load via
// the builtins constants table for isolate-independent code, (2) an inlined
// off-heap trampoline for embedded builtins, falling back to (3) a plain
// pc-relative conditional jump.
// NOTE(review): lossy extraction — label declarations ("Label skip;"),
// "bind(&skip);"/"return;" lines, the Condition parameter line and closing
// braces are missing from this chunk. Kept byte-identical rather than
// guessing the exact control flow; restore from upstream V8 before use.
1580 void TurboAssembler::Jump(Handle<Code> code_object, RelocInfo::Mode rmode,
1583 if (FLAG_embedded_builtins) {
1584 if (root_array_available_ && options().isolate_independent_code &&
1585 !Builtins::IsIsolateIndependentBuiltin(*code_object)) {
// A never-taken jump emits no code at all.
1593 if (cc == never)
return;
// Skip over the unconditional jmp below when the condition fails.
1594 j(NegateCondition(cc), &skip, Label::kNear);
1596 IndirectLoadConstant(kScratchRegister, code_object);
// Compute the entry point: code start is Code::kHeaderSize past the object.
1597 leap(kScratchRegister, FieldOperand(kScratchRegister, Code::kHeaderSize));
1598 jmp(kScratchRegister);
1601 }
else if (options().inline_offheap_trampolines) {
1602 int builtin_index = Builtins::kNoBuiltinId;
1603 if (isolate()->builtins()->IsBuiltinHandle(code_object, &builtin_index) &&
1604 Builtins::IsIsolateIndependent(builtin_index)) {
// Inline the trampoline: jump straight into the embedded blob.
1606 RecordCommentForOffHeapTrampoline(builtin_index);
1607 CHECK_NE(builtin_index, Builtins::kNoBuiltinId);
1608 EmbeddedData d = EmbeddedData::FromBlob();
1609 Address entry = d.InstructionStartOfBuiltin(builtin_index);
1610 Move(kScratchRegister, entry, RelocInfo::OFF_HEAP_TARGET);
1611 jmp(kScratchRegister);
// Fallback: ordinary relocated conditional jump to the code object.
1616 j(cc, code_object, rmode);
1619 void MacroAssembler::JumpToInstructionStream(Address entry) {
1620 Move(kOffHeapTrampolineRegister, entry, RelocInfo::OFF_HEAP_TARGET);
1621 jmp(kOffHeapTrampolineRegister);
1624 void TurboAssembler::Call(ExternalReference ext) {
1625 LoadAddress(kScratchRegister, ext);
1626 call(kScratchRegister);
1629 void TurboAssembler::Call(Operand op) {
1630 if (kPointerSize == kInt64Size && !CpuFeatures::IsSupported(ATOM)) {
1633 movp(kScratchRegister, op);
1634 call(kScratchRegister);
1638 void TurboAssembler::Call(Address destination, RelocInfo::Mode rmode) {
1639 Move(kScratchRegister, destination, rmode);
1640 call(kScratchRegister);
// Call a Code object, preferring the builtins-constants-table indirection
// for isolate-independent code, then an inlined off-heap trampoline, and
// finally a plain relocated call.
// NOTE(review): lossy extraction — "return;" lines after the indirect and
// off-heap calls and several closing braces are missing; kept byte-identical.
1643 void TurboAssembler::Call(Handle<Code> code_object, RelocInfo::Mode rmode) {
1644 if (FLAG_embedded_builtins) {
1645 if (root_array_available_ && options().isolate_independent_code &&
1646 !Builtins::IsIsolateIndependentBuiltin(*code_object)) {
1652 IndirectLoadConstant(kScratchRegister, code_object);
// Entry point is Code::kHeaderSize past the (tagged) code object.
1653 leap(kScratchRegister, FieldOperand(kScratchRegister, Code::kHeaderSize));
1654 call(kScratchRegister);
1656 }
else if (options().inline_offheap_trampolines) {
1657 int builtin_index = Builtins::kNoBuiltinId;
1658 if (isolate()->builtins()->IsBuiltinHandle(code_object, &builtin_index) &&
1659 Builtins::IsIsolateIndependent(builtin_index)) {
// Inline the trampoline: call straight into the embedded blob.
1661 RecordCommentForOffHeapTrampoline(builtin_index);
1662 CHECK_NE(builtin_index, Builtins::kNoBuiltinId);
1663 EmbeddedData d = EmbeddedData::FromBlob();
1664 Address entry = d.InstructionStartOfBuiltin(builtin_index);
1665 Move(kScratchRegister, entry, RelocInfo::OFF_HEAP_TARGET);
1666 call(kScratchRegister);
// Fallback: ordinary relocated call.
1671 DCHECK(RelocInfo::IsCodeTarget(rmode));
1672 call(code_object, rmode);
1675 void TurboAssembler::RetpolineCall(Register reg) {
1676 Label setup_return, setup_target, inner_indirect_branch, capture_spec;
1680 bind(&inner_indirect_branch);
1681 call(&setup_target);
1683 bind(&capture_spec);
1687 bind(&setup_target);
1688 movq(Operand(rsp, 0), reg);
1691 bind(&setup_return);
1692 call(&inner_indirect_branch);
1695 void TurboAssembler::RetpolineCall(Address destination, RelocInfo::Mode rmode) {
1696 Move(kScratchRegister, destination, rmode);
1697 RetpolineCall(kScratchRegister);
1700 void TurboAssembler::RetpolineJump(Register reg) {
1701 Label setup_target, capture_spec;
1703 call(&setup_target);
1705 bind(&capture_spec);
1709 bind(&setup_target);
1710 movq(Operand(rsp, 0), reg);
1714 void TurboAssembler::Pextrd(Register dst, XMMRegister src, int8_t imm8) {
1719 if (CpuFeatures::IsSupported(SSE4_1)) {
1720 CpuFeatureScope sse_scope(
this, SSE4_1);
1721 pextrd(dst, src, imm8);
1726 shrq(dst, Immediate(32));
1729 void TurboAssembler::Pinsrd(XMMRegister dst, Register src, int8_t imm8) {
1730 if (CpuFeatures::IsSupported(SSE4_1)) {
1731 CpuFeatureScope sse_scope(
this, SSE4_1);
1732 pinsrd(dst, src, imm8);
1735 Movd(kScratchDoubleReg, src);
1737 punpckldq(dst, kScratchDoubleReg);
1740 Movss(dst, kScratchDoubleReg);
1744 void TurboAssembler::Pinsrd(XMMRegister dst, Operand src, int8_t imm8) {
1745 if (CpuFeatures::IsSupported(SSE4_1)) {
1746 CpuFeatureScope sse_scope(
this, SSE4_1);
1747 pinsrd(dst, src, imm8);
1750 Movd(kScratchDoubleReg, src);
1752 punpckldq(dst, kScratchDoubleReg);
1755 Movss(dst, kScratchDoubleReg);
1759 void TurboAssembler::Lzcntl(Register dst, Register src) {
1760 if (CpuFeatures::IsSupported(LZCNT)) {
1761 CpuFeatureScope scope(
this, LZCNT);
1767 j(not_zero, ¬_zero_src, Label::kNear);
1769 bind(¬_zero_src);
1770 xorl(dst, Immediate(31));
1773 void TurboAssembler::Lzcntl(Register dst, Operand src) {
1774 if (CpuFeatures::IsSupported(LZCNT)) {
1775 CpuFeatureScope scope(
this, LZCNT);
1781 j(not_zero, ¬_zero_src, Label::kNear);
1783 bind(¬_zero_src);
1784 xorl(dst, Immediate(31));
1787 void TurboAssembler::Lzcntq(Register dst, Register src) {
1788 if (CpuFeatures::IsSupported(LZCNT)) {
1789 CpuFeatureScope scope(
this, LZCNT);
1795 j(not_zero, ¬_zero_src, Label::kNear);
1797 bind(¬_zero_src);
1798 xorl(dst, Immediate(63));
1801 void TurboAssembler::Lzcntq(Register dst, Operand src) {
1802 if (CpuFeatures::IsSupported(LZCNT)) {
1803 CpuFeatureScope scope(
this, LZCNT);
1809 j(not_zero, ¬_zero_src, Label::kNear);
1811 bind(¬_zero_src);
1812 xorl(dst, Immediate(63));
1815 void TurboAssembler::Tzcntq(Register dst, Register src) {
1816 if (CpuFeatures::IsSupported(BMI1)) {
1817 CpuFeatureScope scope(
this, BMI1);
1823 j(not_zero, ¬_zero_src, Label::kNear);
1826 bind(¬_zero_src);
1829 void TurboAssembler::Tzcntq(Register dst, Operand src) {
1830 if (CpuFeatures::IsSupported(BMI1)) {
1831 CpuFeatureScope scope(
this, BMI1);
1837 j(not_zero, ¬_zero_src, Label::kNear);
1840 bind(¬_zero_src);
1843 void TurboAssembler::Tzcntl(Register dst, Register src) {
1844 if (CpuFeatures::IsSupported(BMI1)) {
1845 CpuFeatureScope scope(
this, BMI1);
1851 j(not_zero, ¬_zero_src, Label::kNear);
1853 bind(¬_zero_src);
1856 void TurboAssembler::Tzcntl(Register dst, Operand src) {
1857 if (CpuFeatures::IsSupported(BMI1)) {
1858 CpuFeatureScope scope(
this, BMI1);
1864 j(not_zero, ¬_zero_src, Label::kNear);
1866 bind(¬_zero_src);
1869 void TurboAssembler::Popcntl(Register dst, Register src) {
1870 if (CpuFeatures::IsSupported(POPCNT)) {
1871 CpuFeatureScope scope(
this, POPCNT);
1878 void TurboAssembler::Popcntl(Register dst, Operand src) {
1879 if (CpuFeatures::IsSupported(POPCNT)) {
1880 CpuFeatureScope scope(
this, POPCNT);
1887 void TurboAssembler::Popcntq(Register dst, Register src) {
1888 if (CpuFeatures::IsSupported(POPCNT)) {
1889 CpuFeatureScope scope(
this, POPCNT);
1896 void TurboAssembler::Popcntq(Register dst, Operand src) {
1897 if (CpuFeatures::IsSupported(POPCNT)) {
1898 CpuFeatureScope scope(
this, POPCNT);
// Push all safepoint-saved registers (used around runtime calls that need a
// full register snapshot), then reserve slack so the frame covers
// kNumSafepointRegisters slots.
// NOTE(review): lossy extraction — the sequence of pushq(<reg>) lines, the
// "int sp_delta =" declarations and closing braces are missing from this
// chunk; kept byte-identical. The register order must match
// kSafepointPushRegisterIndices below — restore from upstream V8.
1906 void MacroAssembler::Pushad() {
1922 STATIC_ASSERT(12 == kNumSafepointSavedRegisters);
// Reserve space for the registers that are *not* saved.
1925 (kNumSafepointRegisters - kNumSafepointSavedRegisters) * kPointerSize;
1926 leap(rsp, Operand(rsp, -sp_delta));
// Inverse of Pushad: drop the slack, then pop the registers back.
1930 void MacroAssembler::Popad() {
1933 (kNumSafepointRegisters - kNumSafepointSavedRegisters) * kPointerSize;
1934 leap(rsp, Operand(rsp, sp_delta));
// Maps register code -> index of its Pushad stack slot (initializer elided
// by the extraction).
1953 MacroAssembler::kSafepointPushRegisterIndices[Register::kNumRegisters] = {
1972 void MacroAssembler::PushStackHandler() {
1974 STATIC_ASSERT(StackHandlerConstants::kSize == 2 * kPointerSize);
1975 STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0);
1980 ExternalReference handler_address =
1981 ExternalReference::Create(IsolateAddressId::kHandlerAddress, isolate());
1982 Push(ExternalReferenceAsOperand(handler_address));
1985 movp(ExternalReferenceAsOperand(handler_address), rsp);
1989 void MacroAssembler::PopStackHandler() {
1990 STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0);
1991 ExternalReference handler_address =
1992 ExternalReference::Create(IsolateAddressId::kHandlerAddress, isolate());
1993 Pop(ExternalReferenceAsOperand(handler_address));
1994 addp(rsp, Immediate(StackHandlerConstants::kSize - kPointerSize));
1997 void TurboAssembler::Ret() { ret(0); }
1999 void TurboAssembler::Ret(
int bytes_dropped, Register scratch) {
2000 if (is_uint16(bytes_dropped)) {
2003 PopReturnAddressTo(scratch);
2004 addp(rsp, Immediate(bytes_dropped));
2005 PushReturnAddressFrom(scratch);
2010 void MacroAssembler::CmpObjectType(Register heap_object,
2013 movp(map, FieldOperand(heap_object, HeapObject::kMapOffset));
2014 CmpInstanceType(map, type);
2018 void MacroAssembler::CmpInstanceType(Register map, InstanceType type) {
2019 cmpw(FieldOperand(map, Map::kInstanceTypeOffset), Immediate(type));
2022 void MacroAssembler::DoubleToI(Register result_reg, XMMRegister input_reg,
2023 XMMRegister scratch, Label* lost_precision,
2024 Label* is_nan, Label::Distance dst) {
2025 Cvttsd2si(result_reg, input_reg);
2026 Cvtlsi2sd(kScratchDoubleReg, result_reg);
2027 Ucomisd(kScratchDoubleReg, input_reg);
2028 j(not_equal, lost_precision, dst);
2029 j(parity_even, is_nan, dst);
2033 void MacroAssembler::AssertNotSmi(Register
object) {
2034 if (emit_debug_code()) {
2035 Condition is_smi = CheckSmi(
object);
2036 Check(NegateCondition(is_smi), AbortReason::kOperandIsASmi);
2041 void MacroAssembler::AssertSmi(Register
object) {
2042 if (emit_debug_code()) {
2043 Condition is_smi = CheckSmi(
object);
2044 Check(is_smi, AbortReason::kOperandIsNotASmi);
2048 void MacroAssembler::AssertSmi(Operand
object) {
2049 if (emit_debug_code()) {
2050 Condition is_smi = CheckSmi(
object);
2051 Check(is_smi, AbortReason::kOperandIsNotASmi);
2055 void TurboAssembler::AssertZeroExtended(Register int32_register) {
2056 if (emit_debug_code()) {
2057 DCHECK_NE(int32_register, kScratchRegister);
2058 movq(kScratchRegister,
int64_t{0x0000000100000000});
2059 cmpq(kScratchRegister, int32_register);
2060 Check(above_equal, AbortReason::k32BitValueInRegisterIsNotZeroExtended);
2064 void MacroAssembler::AssertConstructor(Register
object) {
2065 if (emit_debug_code()) {
2066 testb(
object, Immediate(kSmiTagMask));
2067 Check(not_equal, AbortReason::kOperandIsASmiAndNotAConstructor);
2069 movq(
object, FieldOperand(
object, HeapObject::kMapOffset));
2070 testb(FieldOperand(
object, Map::kBitFieldOffset),
2071 Immediate(Map::IsConstructorBit::kMask));
2073 Check(not_zero, AbortReason::kOperandIsNotAConstructor);
2077 void MacroAssembler::AssertFunction(Register
object) {
2078 if (emit_debug_code()) {
2079 testb(
object, Immediate(kSmiTagMask));
2080 Check(not_equal, AbortReason::kOperandIsASmiAndNotAFunction);
2082 CmpObjectType(
object, JS_FUNCTION_TYPE,
object);
2084 Check(equal, AbortReason::kOperandIsNotAFunction);
2089 void MacroAssembler::AssertBoundFunction(Register
object) {
2090 if (emit_debug_code()) {
2091 testb(
object, Immediate(kSmiTagMask));
2092 Check(not_equal, AbortReason::kOperandIsASmiAndNotABoundFunction);
2094 CmpObjectType(
object, JS_BOUND_FUNCTION_TYPE,
object);
2096 Check(equal, AbortReason::kOperandIsNotABoundFunction);
2100 void MacroAssembler::AssertGeneratorObject(Register
object) {
2101 if (!emit_debug_code())
return;
2102 testb(
object, Immediate(kSmiTagMask));
2103 Check(not_equal, AbortReason::kOperandIsASmiAndNotAGeneratorObject);
2106 Register map = object;
2108 movp(map, FieldOperand(
object, HeapObject::kMapOffset));
2112 CmpInstanceType(map, JS_GENERATOR_OBJECT_TYPE);
2113 j(equal, &do_check);
2116 CmpInstanceType(map, JS_ASYNC_FUNCTION_OBJECT_TYPE);
2117 j(equal, &do_check);
2120 CmpInstanceType(map, JS_ASYNC_GENERATOR_OBJECT_TYPE);
2125 Check(equal, AbortReason::kOperandIsNotAGeneratorObject);
2128 void MacroAssembler::AssertUndefinedOrAllocationSite(Register
object) {
2129 if (emit_debug_code()) {
2130 Label done_checking;
2131 AssertNotSmi(
object);
2132 Cmp(
object, isolate()->factory()->undefined_value());
2133 j(equal, &done_checking);
2134 Cmp(FieldOperand(
object, 0), isolate()->factory()->allocation_site_map());
2135 Assert(equal, AbortReason::kExpectedUndefinedOrCell);
2136 bind(&done_checking);
2140 void MacroAssembler::LoadWeakValue(Register in_out, Label* target_if_cleared) {
2141 cmpl(in_out, Immediate(kClearedWeakHeapObjectLower32));
2142 j(equal, target_if_cleared);
2144 andp(in_out, Immediate(~static_cast<int32_t>(kWeakHeapObjectMask)));
2147 void MacroAssembler::IncrementCounter(StatsCounter* counter,
int value) {
2148 DCHECK_GT(value, 0);
2149 if (FLAG_native_code_counters && counter->Enabled()) {
2150 Operand counter_operand =
2151 ExternalReferenceAsOperand(ExternalReference::Create(counter));
2153 incl(counter_operand);
2155 addl(counter_operand, Immediate(value));
2161 void MacroAssembler::DecrementCounter(StatsCounter* counter,
int value) {
2162 DCHECK_GT(value, 0);
2163 if (FLAG_native_code_counters && counter->Enabled()) {
2164 Operand counter_operand =
2165 ExternalReferenceAsOperand(ExternalReference::Create(counter));
2167 decl(counter_operand);
2169 subl(counter_operand, Immediate(value));
2174 void MacroAssembler::MaybeDropFrames() {
2176 ExternalReference restart_fp =
2177 ExternalReference::debug_restart_fp_address(isolate());
2178 Load(rbx, restart_fp);
2182 j(zero, &dont_drop, Label::kNear);
2183 Jump(BUILTIN_CODE(isolate(), FrameDropperTrampoline), RelocInfo::CODE_TARGET);
// Prepare the stack for a tail call: compute where the callee frame should
// start, relocate the return address and the loop-copied stack contents, pop
// the caller's frame and point rsp at the new frame top.
// NOTE(review): lossy extraction — several label declarations/binds
// ("Label loop, entry;", "bind(&loop);", "bind(&entry);", "decp/subp of
// count_reg"), else-keywords and closing braces are missing; kept
// byte-identical rather than guessing the exact loop structure.
2188 void TurboAssembler::PrepareForTailCall(
const ParameterCount& callee_args_count,
2189 Register caller_args_count_reg,
2190 Register scratch0, Register scratch1) {
2192 if (callee_args_count.is_reg()) {
2193 DCHECK(!AreAliased(callee_args_count.reg(), caller_args_count_reg, scratch0,
2196 DCHECK(!AreAliased(caller_args_count_reg, scratch0, scratch1));
// Compute the destination stack pointer in new_sp_reg.
2202 Register new_sp_reg = scratch0;
2203 if (callee_args_count.is_reg()) {
2204 subp(caller_args_count_reg, callee_args_count.reg());
2205 leap(new_sp_reg, Operand(rbp, caller_args_count_reg, times_pointer_size,
2206 StandardFrameConstants::kCallerPCOffset));
2208 leap(new_sp_reg, Operand(rbp, caller_args_count_reg, times_pointer_size,
2209 StandardFrameConstants::kCallerPCOffset -
2210 callee_args_count.immediate() * kPointerSize));
2213 if (FLAG_debug_code) {
2214 cmpp(rsp, new_sp_reg);
2215 Check(below, AbortReason::kStackAccessBelowStackPointer);
// Copy the return address to its new location.
2221 Register tmp_reg = scratch1;
2222 movp(tmp_reg, Operand(rbp, StandardFrameConstants::kCallerPCOffset));
2223 movp(Operand(rsp, 0), tmp_reg);
// Restore the caller's frame pointer.
2227 movp(rbp, Operand(rbp, StandardFrameConstants::kCallerFPOffset));
// count_reg = number of slots to copy (+2 for return address and receiver
// — presumably; confirm against upstream).
2230 Register count_reg = caller_args_count_reg;
2231 if (callee_args_count.is_reg()) {
2232 leap(count_reg, Operand(callee_args_count.reg(), 2));
2234 movp(count_reg, Immediate(callee_args_count.immediate() + 2));
// Copy loop: move the stack contents down to the new location.
2241 jmp(&entry, Label::kNear);
2244 movp(tmp_reg, Operand(rsp, count_reg, times_pointer_size, 0));
2245 movp(Operand(new_sp_reg, count_reg, times_pointer_size, 0), tmp_reg);
2247 cmpp(count_reg, Immediate(0));
2248 j(not_equal, &loop, Label::kNear);
// Point rsp at the start of the relocated frame.
2251 movp(rsp, new_sp_reg);
2254 void MacroAssembler::InvokeFunction(Register
function, Register new_target,
2255 const ParameterCount& actual,
2257 movp(rbx, FieldOperand(
function, JSFunction::kSharedFunctionInfoOffset));
2259 FieldOperand(rbx, SharedFunctionInfo::kFormalParameterCountOffset));
2261 ParameterCount expected(rbx);
2262 InvokeFunction(
function, new_target, expected, actual, flag);
2265 void MacroAssembler::InvokeFunction(Register
function, Register new_target,
2266 const ParameterCount& expected,
2267 const ParameterCount& actual,
2269 DCHECK(
function == rdi);
2270 movp(rsi, FieldOperand(
function, JSFunction::kContextOffset));
2271 InvokeFunctionCode(rdi, new_target, expected, actual, flag);
2274 void MacroAssembler::InvokeFunctionCode(Register
function, Register new_target,
2275 const ParameterCount& expected,
2276 const ParameterCount& actual,
2279 DCHECK(flag == JUMP_FUNCTION || has_frame());
2280 DCHECK(
function == rdi);
2281 DCHECK_IMPLIES(new_target.is_valid(), new_target == rdx);
2284 CheckDebugHook(
function, new_target, expected, actual);
2287 if (!new_target.is_valid()) {
2288 LoadRoot(rdx, RootIndex::kUndefinedValue);
2292 bool definitely_mismatches =
false;
2293 InvokePrologue(expected, actual, &done, &definitely_mismatches, flag,
2295 if (!definitely_mismatches) {
2299 static_assert(kJavaScriptCallCodeStartRegister == rcx,
"ABI mismatch");
2300 movp(rcx, FieldOperand(
function, JSFunction::kCodeOffset));
2301 addp(rcx, Immediate(Code::kHeaderSize - kHeapObjectTag));
2302 if (flag == CALL_FUNCTION) {
2305 DCHECK(flag == JUMP_FUNCTION);
// Decide whether the call matches the expected argument count exactly, can
// skip adaptation, or must go through the ArgumentsAdaptorTrampoline.
// Protocol: rax = actual count, rbx = expected count for the adaptor.
// NOTE(review): lossy extraction — "Label invoke;", several else-keywords,
// the "bind(&invoke);" and closing braces are missing; kept byte-identical
// rather than guessing the branch structure.
2312 void MacroAssembler::InvokePrologue(
const ParameterCount& expected,
2313 const ParameterCount& actual, Label* done,
2314 bool* definitely_mismatches,
2316 Label::Distance near_jump) {
2317 bool definitely_matches =
false;
2318 *definitely_mismatches =
false;
2320 if (expected.is_immediate()) {
2321 DCHECK(actual.is_immediate());
2322 Set(rax, actual.immediate());
2323 if (expected.immediate() == actual.immediate()) {
2324 definitely_matches =
true;
// kDontAdaptArgumentsSentinel means the callee accepts any count.
2326 if (expected.immediate() ==
2327 SharedFunctionInfo::kDontAdaptArgumentsSentinel) {
2332 definitely_matches =
true;
2334 *definitely_mismatches =
true;
2335 Set(rbx, expected.immediate());
2339 if (actual.is_immediate()) {
// Expected is in a register, actual is an immediate.
2343 Set(rax, actual.immediate());
2344 cmpp(expected.reg(), Immediate(actual.immediate()));
2345 j(equal, &invoke, Label::kNear);
2346 DCHECK(expected.reg() == rbx);
2347 }
else if (expected.reg() != actual.reg()) {
// Both are registers: compare them at runtime.
2350 cmpp(expected.reg(), actual.reg());
2351 j(equal, &invoke, Label::kNear);
2352 DCHECK(actual.reg() == rax);
2353 DCHECK(expected.reg() == rbx);
2355 definitely_matches =
true;
2356 Move(rax, actual.reg());
2360 if (!definitely_matches) {
2361 Handle<Code> adaptor = BUILTIN_CODE(isolate(), ArgumentsAdaptorTrampoline);
2362 if (flag == CALL_FUNCTION) {
2363 Call(adaptor, RelocInfo::CODE_TARGET);
2364 if (!*definitely_mismatches) {
2365 jmp(done, near_jump);
// Tail-call case: jump to the adaptor instead.
2368 Jump(adaptor, RelocInfo::CODE_TARGET);
// If the debugger's function-call hook is active, save the call state,
// invoke Runtime::kDebugOnFunctionCall, and restore the state afterwards.
// NOTE(review): lossy extraction — "Label skip_hook;", the Push/Pop pairs
// for actual/new_target/fun, "bind(&skip_hook);" and closing braces are
// missing from this chunk; kept byte-identical.
2374 void MacroAssembler::CheckDebugHook(Register fun, Register new_target,
2375 const ParameterCount& expected,
2376 const ParameterCount& actual) {
2378 ExternalReference debug_hook_active =
2379 ExternalReference::debug_hook_on_function_call_address(isolate());
2380 Operand debug_hook_active_operand =
2381 ExternalReferenceAsOperand(debug_hook_active);
2382 cmpb(debug_hook_active_operand, Immediate(0));
2383 j(equal, &skip_hook);
// Hook is active: enter an internal frame unless one already exists.
2386 FrameScope frame(
this,
2387 has_frame() ? StackFrame::NONE : StackFrame::INTERNAL);
// Preserve the (Smi-tagged) expected count across the runtime call.
2388 if (expected.is_reg()) {
2389 SmiTag(expected.reg(), expected.reg());
2390 Push(expected.reg());
2392 if (actual.is_reg()) {
2393 SmiTag(actual.reg(), actual.reg());
2395 SmiUntag(actual.reg(), actual.reg());
2397 if (new_target.is_valid()) {
// Push the receiver and call the debug hook runtime function.
2402 Push(StackArgumentsAccessor(rbp, actual).GetReceiverOperand());
2403 CallRuntime(Runtime::kDebugOnFunctionCall);
2405 if (new_target.is_valid()) {
2408 if (actual.is_reg()) {
2410 SmiUntag(actual.reg(), actual.reg());
2412 if (expected.is_reg()) {
2413 Pop(expected.reg());
2414 SmiUntag(expected.reg(), expected.reg());
// Frame setup/teardown helpers.
// NOTE(review): lossy extraction — the pushq(rbp)/movp(rbp, rsp) lines of
// StubPrologue and EnterFrame, the entire body of Prologue, and the
// movp(rsp, rbp)/popq(rbp) epilogue of LeaveFrame are missing from this
// chunk; kept byte-identical. Restore from upstream V8 before use.
// StubPrologue: set up an rbp frame tagged with the stub frame-type marker.
2420 void TurboAssembler::StubPrologue(StackFrame::Type type) {
2423 Push(Immediate(StackFrame::TypeToMarker(type)));
// Prologue: standard JS function frame setup (body elided by extraction).
2426 void TurboAssembler::Prologue() {
// EnterFrame: push rbp-frame plus a frame-type marker.
2433 void TurboAssembler::EnterFrame(StackFrame::Type type) {
2436 Push(Immediate(StackFrame::TypeToMarker(type)));
// LeaveFrame: optionally verify the marker, then tear the frame down.
2439 void TurboAssembler::LeaveFrame(StackFrame::Type type) {
2440 if (emit_debug_code()) {
2441 cmpp(Operand(rbp, CommonFrameConstants::kContextOrFrameTypeOffset),
2442 Immediate(StackFrame::TypeToMarker(type)));
2443 Check(equal, AbortReason::kStackFrameTypesMustMatch);
// Set up the fixed part of an exit frame (frame used when calling from JS
// out to C++): rbp frame, frame-type marker, code object slot, and the
// isolate's c_entry_fp/context/c_function bookkeeping.
// NOTE(review): lossy extraction — pushq(rbp)/movp(rbp, rsp), the
// Push(Immediate(0)) context slot, the Store(...) argument lines and
// closing braces are missing; kept byte-identical.
2449 void MacroAssembler::EnterExitFramePrologue(
bool save_rax,
2450 StackFrame::Type frame_type) {
2451 DCHECK(frame_type == StackFrame::EXIT ||
2452 frame_type == StackFrame::BUILTIN_EXIT);
// Layout sanity checks against ExitFrameConstants.
2456 DCHECK_EQ(kFPOnStackSize + kPCOnStackSize,
2457 ExitFrameConstants::kCallerSPDisplacement);
2458 DCHECK_EQ(kFPOnStackSize, ExitFrameConstants::kCallerPCOffset);
2459 DCHECK_EQ(0 * kPointerSize, ExitFrameConstants::kCallerFPOffset);
2464 Push(Immediate(StackFrame::TypeToMarker(frame_type)));
2465 DCHECK_EQ(-2 * kPointerSize, ExitFrameConstants::kSPOffset);
// Save the code object currently being generated into the frame.
2467 Move(kScratchRegister, CodeObject(), RelocInfo::EMBEDDED_OBJECT);
2468 Push(kScratchRegister);
// Record rbp/context/target in the isolate's per-thread data.
2476 ExternalReference::Create(IsolateAddressId::kCEntryFPAddress, isolate()),
2478 Store(ExternalReference::Create(IsolateAddressId::kContextAddress, isolate()),
2481 ExternalReference::Create(IsolateAddressId::kCFunctionAddress, isolate()),
// Reserve argument space (plus Windows shadow space), optionally saving
// all allocatable double registers, then align rsp and record it.
2486 void MacroAssembler::EnterExitFrameEpilogue(
int arg_stack_space,
2487 bool save_doubles) {
// NOTE(review): the "#ifdef _WIN64" around the shadow space and the
// "if (save_doubles) {" opener are missing from this chunk.
2489 const int kShadowSpace = 4;
2490 arg_stack_space += kShadowSpace;
2494 int space = XMMRegister::kNumRegisters * kDoubleSize +
2495 arg_stack_space * kRegisterSize;
2496 subp(rsp, Immediate(space));
2497 int offset = -ExitFrameConstants::kFixedFrameSizeFromFp;
2498 const RegisterConfiguration* config = RegisterConfiguration::Default();
2499 for (
int i = 0;
i < config->num_allocatable_double_registers(); ++
i) {
2500 DoubleRegister reg =
2501 DoubleRegister::from_code(config->GetAllocatableDoubleCode(
i));
// Spill each allocatable double register below the fixed frame.
2502 Movsd(Operand(rbp, offset - ((
i + 1) * kDoubleSize)), reg);
2504 }
else if (arg_stack_space > 0) {
2505 subp(rsp, Immediate(arg_stack_space * kRegisterSize));
// Align the stack to the platform's activation frame alignment.
2509 const int kFrameAlignment = base::OS::ActivationFrameAlignment();
2510 if (kFrameAlignment > 0) {
2511 DCHECK(base::bits::IsPowerOfTwo(kFrameAlignment));
2512 DCHECK(is_int8(kFrameAlignment));
2513 andp(rsp, Immediate(-kFrameAlignment));
// Patch the frame's SP slot now that rsp is final.
2517 movp(Operand(rbp, ExitFrameConstants::kSPOffset), rsp);
2520 void MacroAssembler::EnterExitFrame(
int arg_stack_space,
bool save_doubles,
2521 StackFrame::Type frame_type) {
2522 EnterExitFramePrologue(
true, frame_type);
2526 int offset = StandardFrameConstants::kCallerSPOffset - kPointerSize;
2527 leap(r15, Operand(rbp, r14, times_pointer_size, offset));
2529 EnterExitFrameEpilogue(arg_stack_space, save_doubles);
2533 void MacroAssembler::EnterApiExitFrame(
int arg_stack_space) {
2534 EnterExitFramePrologue(
false, StackFrame::EXIT);
2535 EnterExitFrameEpilogue(arg_stack_space,
false);
// Tear down an exit frame: restore saved double registers, optionally drop
// the pushed arguments (using the argv pointer kept in r15 by
// EnterExitFrame), then run the shared epilogue.
// NOTE(review): lossy extraction — the "if (save_doubles) {" guard around
// the restore loop, the else branch of "if (pop_arguments)" (the
// movp(rsp, rbp)/popq(rbp) path) and closing braces are missing; kept
// byte-identical.
2539 void MacroAssembler::LeaveExitFrame(
bool save_doubles,
bool pop_arguments) {
2543 int offset = -ExitFrameConstants::kFixedFrameSizeFromFp;
2544 const RegisterConfiguration* config = RegisterConfiguration::Default();
2545 for (
int i = 0;
i < config->num_allocatable_double_registers(); ++
i) {
2546 DoubleRegister reg =
2547 DoubleRegister::from_code(config->GetAllocatableDoubleCode(
i));
// Reload each double register from its spill slot below the fixed frame.
2548 Movsd(reg, Operand(rbp, offset - ((
i + 1) * kDoubleSize)));
2552 if (pop_arguments) {
// Fetch the caller's return address and frame pointer, then pop the
// arguments using the saved argv pointer (r15).
2554 movp(rcx, Operand(rbp, kFPOnStackSize));
2555 movp(rbp, Operand(rbp, 0 * kPointerSize));
2559 leap(rsp, Operand(r15, 1 * kPointerSize));
2561 PushReturnAddressFrom(rcx);
2567 LeaveExitFrameEpilogue();
// API exit frames keep their arguments; just unwind rbp and run the
// epilogue (unwind lines elided by the extraction).
2570 void MacroAssembler::LeaveApiExitFrame() {
2574 LeaveExitFrameEpilogue();
2577 void MacroAssembler::LeaveExitFrameEpilogue() {
2579 ExternalReference context_address =
2580 ExternalReference::Create(IsolateAddressId::kContextAddress, isolate());
2581 Operand context_operand = ExternalReferenceAsOperand(context_address);
2582 movp(rsi, context_operand);
2584 movp(context_operand, Immediate(Context::kInvalidContext));
2588 ExternalReference c_entry_fp_address =
2589 ExternalReference::Create(IsolateAddressId::kCEntryFPAddress, isolate());
2590 Operand c_entry_fp_operand = ExternalReferenceAsOperand(c_entry_fp_address);
2591 movp(c_entry_fp_operand, Immediate(0));
// Number of C-calling-convention arguments passed in registers.
// NOTE(review): the surrounding "#ifdef _WIN64" / "#else" / "#endif" lines
// were dropped by the extraction — 4 is the Win64 ABI value, 6 the System V
// value; kept byte-identical.
2596 static const int kRegisterPassedArguments = 4;
2598 static const int kRegisterPassedArguments = 6;
2602 void MacroAssembler::LoadNativeContextSlot(
int index, Register dst) {
2603 movp(dst, NativeContextOperand());
2604 movp(dst, ContextOperand(dst, index));
2608 int TurboAssembler::ArgumentStackSlotsForCFunctionCall(
int num_arguments) {
2615 DCHECK_GE(num_arguments, 0);
2617 const int kMinimumStackSlots = kRegisterPassedArguments;
2618 if (num_arguments < kMinimumStackSlots)
return kMinimumStackSlots;
2619 return num_arguments;
2621 if (num_arguments < kRegisterPassedArguments)
return 0;
2622 return num_arguments - kRegisterPassedArguments;
2626 void TurboAssembler::PrepareCallCFunction(
int num_arguments) {
2627 int frame_alignment = base::OS::ActivationFrameAlignment();
2628 DCHECK_NE(frame_alignment, 0);
2629 DCHECK_GE(num_arguments, 0);
2632 movp(kScratchRegister, rsp);
2633 DCHECK(base::bits::IsPowerOfTwo(frame_alignment));
2634 int argument_slots_on_stack =
2635 ArgumentStackSlotsForCFunctionCall(num_arguments);
2636 subp(rsp, Immediate((argument_slots_on_stack + 1) * kRegisterSize));
2637 andp(rsp, Immediate(-frame_alignment));
2638 movp(Operand(rsp, argument_slots_on_stack * kRegisterSize), kScratchRegister);
2641 void TurboAssembler::CallCFunction(ExternalReference
function,
2642 int num_arguments) {
2643 LoadAddress(rax,
function);
2644 CallCFunction(rax, num_arguments);
2647 void TurboAssembler::CallCFunction(Register
function,
int num_arguments) {
2648 DCHECK_LE(num_arguments, kMaxCParameters);
2649 DCHECK(has_frame());
2651 if (emit_debug_code()) {
2652 CheckStackAlignment();
2656 DCHECK_NE(base::OS::ActivationFrameAlignment(), 0);
2657 DCHECK_GE(num_arguments, 0);
2658 int argument_slots_on_stack =
2659 ArgumentStackSlotsForCFunctionCall(num_arguments);
2660 movp(rsp, Operand(rsp, argument_slots_on_stack * kRegisterSize));
2663 void TurboAssembler::CheckPageFlag(Register
object, Register scratch,
int mask,
2664 Condition cc, Label* condition_met,
2665 Label::Distance condition_met_distance) {
2666 DCHECK(cc == zero || cc == not_zero);
2667 if (scratch ==
object) {
2668 andp(scratch, Immediate(~kPageAlignmentMask));
2670 movp(scratch, Immediate(~kPageAlignmentMask));
2671 andp(scratch,
object);
2673 if (mask < (1 << kBitsPerByte)) {
2674 testb(Operand(scratch, MemoryChunk::kFlagsOffset),
2675 Immediate(static_cast<uint8_t>(mask)));
2677 testl(Operand(scratch, MemoryChunk::kFlagsOffset), Immediate(mask));
2679 j(cc, condition_met, condition_met_distance);
2682 void TurboAssembler::ComputeCodeStartAddress(Register dst) {
2685 int pc = pc_offset();
2687 leaq(dst, Operand(¤t, -pc));
2690 void TurboAssembler::ResetSpeculationPoisonRegister() {
2692 Set(kSpeculationPoisonRegister, -1);
2698 #endif // V8_TARGET_ARCH_X64