// -- Include guard, headers, and CpuFeatures queries -------------------------
// NOTE(review): original line numbers are fused into the text and several
// lines appear missing from this extract; code is left byte-identical.
5 #ifndef V8_ARM64_ASSEMBLER_ARM64_INL_H_ 6 #define V8_ARM64_ASSEMBLER_ARM64_INL_H_ 8 #include "src/arm64/assembler-arm64.h" 9 #include "src/assembler.h" 10 #include "src/debug/debug.h" 11 #include "src/objects-inl.h" 12 #include "src/objects/smi.h" 17 bool CpuFeatures::SupportsOptimizer() {
// The optimizing compiler is unconditionally supported on arm64.
return true; }
// Wasm SIMD128 is likewise always available on arm64.
19 bool CpuFeatures::SupportsWasmSimd128() {
return true; }
// Relocates this entry's target after the containing code moved by |delta|
// bytes: internal references patch the raw pointer stored at pc_, while
// pc-relative branches are retargeted in the instruction itself.
21 void RelocInfo::apply(intptr_t delta) {
23 if (RelocInfo::IsInternalReference(rmode_)) {
// Absolute internal reference: adjust the pointer stored at pc_.
25 intptr_t* p =
reinterpret_cast<intptr_t*
>(pc_);
28 Instruction* instr =
reinterpret_cast<Instruction*
>(pc_);
29 if (instr->IsBranchAndLink() || instr->IsUnconditionalBranch()) {
// NOTE(review): the declaration of |old_target| is truncated in this
// extract; presumably it captures the branch's previous absolute
// target -- confirm against the upstream file.
31 reinterpret_cast<Address
>(instr->ImmPCOffsetTarget());
// The branch is pc-relative, so moving the code by |delta| shifts the
// absolute target by -delta.
32 Address new_target = old_target - delta;
33 instr->SetBranchImmTarget(reinterpret_cast<Instruction*>(new_target));
// True when *this and |other| have identical size and register type.
39 inline bool CPURegister::IsSameSizeAndType(
const CPURegister& other)
const {
40 return (reg_size_ == other.reg_size_) && (reg_type_ == other.reg_type_);
// True for the zero register (xzr/wzr).
44 inline bool CPURegister::IsZero()
const {
46 return IsRegister() && (reg_code_ == kZeroRegCode);
// True for the stack pointer (encoded with the internal SP code).
50 inline bool CPURegister::IsSP()
const {
52 return IsRegister() && (reg_code_ == kSPRegInternalCode);
// Unions |other|'s register bits into this list; type and size must match.
56 inline void CPURegList::Combine(
const CPURegList& other) {
58 DCHECK(other.type() == type_);
59 DCHECK(other.RegisterSizeInBits() == size_);
60 list_ |= other.list();
// Removes |other|'s registers; a type mismatch is deliberately a no-op.
64 inline void CPURegList::Remove(
const CPURegList& other) {
66 if (other.type() == type_) {
67 list_ &= ~other.list();
// Adds a single register of matching type and size.
72 inline void CPURegList::Combine(
const CPURegister& other) {
73 DCHECK(other.type() == type_);
74 DCHECK(other.SizeInBits() == size_);
75 Combine(other.code());
// Removes up to four registers; NoReg and wrong-type entries are skipped.
79 inline void CPURegList::Remove(
const CPURegister& other1,
80 const CPURegister& other2,
81 const CPURegister& other3,
82 const CPURegister& other4) {
83 if (!other1.IsNone() && (other1.type() == type_)) Remove(other1.code());
84 if (!other2.IsNone() && (other2.type() == type_)) Remove(other2.code());
85 if (!other3.IsNone() && (other3.type() == type_)) Remove(other3.code());
86 if (!other4.IsNone() && (other4.type() == type_)) Remove(other4.code());
// Sets the list bit for |code| after validating it for this list's
// size and type.
90 inline void CPURegList::Combine(
int code) {
92 DCHECK(CPURegister::Create(code, size_, type_).IsValid());
93 list_ |= (1ULL << code);
// Clears the list bit for |code|.
97 inline void CPURegList::Remove(
int code) {
99 DCHECK(CPURegister::Create(code, size_, type_).IsValid());
100 list_ &= ~(1ULL << code);
// Builds a 64-bit (X) register from a raw code; the internal SP code is
// special-cased (its branch body is truncated in this extract).
104 inline Register Register::XRegFromCode(
unsigned code) {
105 if (code == kSPRegInternalCode) {
108 DCHECK_LT(code, static_cast<unsigned>(kNumberOfRegisters));
109 return Register::Create(code, kXRegSizeInBits);
// 32-bit (W) variant of the above.
114 inline Register Register::WRegFromCode(
unsigned code) {
115 if (code == kSPRegInternalCode) {
118 DCHECK_LT(code, static_cast<unsigned>(kNumberOfRegisters));
119 return Register::Create(code, kWRegSizeInBits);
// The VRegister builders below create a vector register of the named width
// (B=8, H=16, S=32, D=64, Q=128 bits, V=full vector) from a raw code.
123 inline VRegister VRegister::BRegFromCode(
unsigned code) {
124 DCHECK_LT(code, static_cast<unsigned>(kNumberOfVRegisters));
125 return VRegister::Create(code, kBRegSizeInBits);
128 inline VRegister VRegister::HRegFromCode(
unsigned code) {
129 DCHECK_LT(code, static_cast<unsigned>(kNumberOfVRegisters));
130 return VRegister::Create(code, kHRegSizeInBits);
133 inline VRegister VRegister::SRegFromCode(
unsigned code) {
134 DCHECK_LT(code, static_cast<unsigned>(kNumberOfVRegisters));
135 return VRegister::Create(code, kSRegSizeInBits);
138 inline VRegister VRegister::DRegFromCode(
unsigned code) {
139 DCHECK_LT(code, static_cast<unsigned>(kNumberOfVRegisters));
140 return VRegister::Create(code, kDRegSizeInBits);
143 inline VRegister VRegister::QRegFromCode(
unsigned code) {
144 DCHECK_LT(code, static_cast<unsigned>(kNumberOfVRegisters));
145 return VRegister::Create(code, kQRegSizeInBits);
148 inline VRegister VRegister::VRegFromCode(
unsigned code) {
149 DCHECK_LT(code, static_cast<unsigned>(kNumberOfVRegisters));
150 return VRegister::Create(code, kVRegSizeInBits);
// Re-interpretation helpers: view this CPURegister as a specific register
// flavour/width. Each DCHECKs the underlying register kind first.
153 inline Register CPURegister::W()
const {
154 DCHECK(IsRegister());
155 return Register::WRegFromCode(reg_code_);
158 inline Register CPURegister::Reg()
const {
159 DCHECK(IsRegister());
160 return Register::Create(reg_code_, reg_size_);
163 inline VRegister CPURegister::VReg()
const {
164 DCHECK(IsVRegister());
165 return VRegister::Create(reg_code_, reg_size_);
168 inline Register CPURegister::X()
const {
169 DCHECK(IsRegister());
170 return Register::XRegFromCode(reg_code_);
173 inline VRegister CPURegister::V()
const {
174 DCHECK(IsVRegister());
175 return VRegister::VRegFromCode(reg_code_);
178 inline VRegister CPURegister::B()
const {
179 DCHECK(IsVRegister());
180 return VRegister::BRegFromCode(reg_code_);
183 inline VRegister CPURegister::H()
const {
184 DCHECK(IsVRegister());
185 return VRegister::HRegFromCode(reg_code_);
188 inline VRegister CPURegister::S()
const {
189 DCHECK(IsVRegister());
190 return VRegister::SRegFromCode(reg_code_);
193 inline VRegister CPURegister::D()
const {
194 DCHECK(IsVRegister());
195 return VRegister::DRegFromCode(reg_code_);
198 inline VRegister CPURegister::Q()
const {
199 DCHECK(IsVRegister());
200 return VRegister::QRegFromCode(reg_code_);
// ImmediateInitializer traits: map a value type to its relocation mode and
// raw 64-bit immediate. The generic (integral) case uses RelocInfo::NONE.
208 static const bool kIsIntType =
true;
209 static inline RelocInfo::Mode rmode_for(
T) {
return RelocInfo::NONE; }
210 static inline int64_t immediate_for(
T t) {
211 STATIC_ASSERT(
sizeof(
T) <= 8);
// Specialization for Smi: the tagged pointer bits are the immediate.
218 static const bool kIsIntType =
false;
219 static inline RelocInfo::Mode rmode_for(
Smi t) {
return RelocInfo::NONE; }
221 return static_cast<int64_t>(t.ptr());
// Specialization for ExternalReference: records EXTERNAL_REFERENCE rmode.
228 static const bool kIsIntType =
false;
230 return RelocInfo::EXTERNAL_REFERENCE;
233 return static_cast<int64_t>(t.address());
// Handle-valued immediates defer to InitializeHandle.
240 InitializeHandle(value);
// Generic Immediate constructor: derive value and rmode from the trait.
245 Immediate::Immediate(
T t)
246 : value_(ImmediateInitializer<
T>::immediate_for(t)),
247 rmode_(ImmediateInitializer<
T>::rmode_for(t)) {}
// Explicit-rmode overload; only meaningful for plain integer types.
251 Immediate::Immediate(T t, RelocInfo::Mode rmode)
252 : value_(ImmediateInitializer<T>::immediate_for(t)),
254 STATIC_ASSERT(ImmediateInitializer<T>::kIsIntType);
// Operand constructors wrapping an immediate (no register part).
259 Operand::Operand(Handle<T> value) : immediate_(value), reg_(NoReg) {}
263 Operand::Operand(T t) : immediate_(t), reg_(NoReg) {}
267 Operand::Operand(T t, RelocInfo::Mode rmode)
268 : immediate_(t, rmode),
// Shifted-register operand: |reg| shifted by |shift_amount|. The amount must
// fit the register width, and SP may not be shifted.
271 Operand::Operand(Register reg, Shift shift,
unsigned shift_amount)
276 shift_amount_(shift_amount) {
277 DCHECK(reg.Is64Bits() || (shift_amount < kWRegSizeInBits));
278 DCHECK(reg.Is32Bits() || (shift_amount < kXRegSizeInBits));
279 DCHECK_IMPLIES(reg.IsSP(), shift_amount == 0);
// Extended-register operand: |reg| extended, then shifted left by up to 4.
// The X-sized extensions (SXTX/UXTX) require a 64-bit register.
283 Operand::Operand(Register reg, Extend extend,
unsigned shift_amount)
288 shift_amount_(shift_amount) {
289 DCHECK(reg.IsValid());
290 DCHECK_LE(shift_amount, 4);
294 DCHECK(reg.Is64Bits() || ((extend != SXTX) && (extend != UXTX)));
// True when this operand carries a pending heap-object request (the object
// is materialized later; rmode must be EMBEDDED_OBJECT or CODE_TARGET).
297 bool Operand::IsHeapObjectRequest()
const {
298 DCHECK_IMPLIES(heap_object_request_.has_value(), reg_.Is(NoReg));
299 DCHECK_IMPLIES(heap_object_request_.has_value(),
300 immediate_.rmode() == RelocInfo::EMBEDDED_OBJECT ||
301 immediate_.rmode() == RelocInfo::CODE_TARGET);
302 return heap_object_request_.has_value();
// Accessor for the pending request; only valid if IsHeapObjectRequest().
305 HeapObjectRequest Operand::heap_object_request()
const {
306 DCHECK(IsHeapObjectRequest());
307 return *heap_object_request_;
// Classification helpers: an operand is exactly one of immediate,
// shifted register, or extended register.
310 bool Operand::IsImmediate()
const {
311 return reg_.Is(NoReg) && !IsHeapObjectRequest();
315 bool Operand::IsShiftedRegister()
const {
316 return reg_.IsValid() && (shift_ != NO_SHIFT);
320 bool Operand::IsExtendedRegister()
const {
321 return reg_.IsValid() && (extend_ != NO_EXTEND);
// True for an immediate zero or for the zero register.
325 bool Operand::IsZero()
const {
327 return ImmediateValue() == 0;
329 return reg().IsZero();
// Converts an LSL-shifted register operand (amount <= 4) into the
// equivalent extended-register form (UXTX for 64-bit, UXTW otherwise).
334 Operand Operand::ToExtendedRegister()
const {
335 DCHECK(IsShiftedRegister());
336 DCHECK((shift_ == LSL) && (shift_amount_ <= 4));
337 return Operand(reg_, reg_.Is64Bits() ? UXTX : UXTW, shift_amount_);
// Immediate placeholder for a heap-object request; the request kind must be
// consistent with the relocation mode recorded in immediate_.
340 Immediate Operand::immediate_for_heap_object_request()
const {
341 DCHECK((heap_object_request().kind() == HeapObjectRequest::kHeapNumber &&
342 immediate_.rmode() == RelocInfo::EMBEDDED_OBJECT) ||
343 (heap_object_request().kind() == HeapObjectRequest::kCodeStub &&
344 immediate_.rmode() == RelocInfo::CODE_TARGET) ||
345 (heap_object_request().kind() == HeapObjectRequest::kStringConstant &&
346 immediate_.rmode() == RelocInfo::EMBEDDED_OBJECT));
// Accessors; each DCHECKs the operand kind it is valid for.
350 Immediate Operand::immediate()
const {
351 DCHECK(IsImmediate());
356 int64_t Operand::ImmediateValue()
const {
357 DCHECK(IsImmediate());
358 return immediate_.value();
361 RelocInfo::Mode Operand::ImmediateRMode()
const {
362 DCHECK(IsImmediate() || IsHeapObjectRequest());
363 return immediate_.rmode();
366 Register Operand::reg()
const {
367 DCHECK(IsShiftedRegister() || IsExtendedRegister());
372 Shift Operand::shift()
const {
373 DCHECK(IsShiftedRegister());
378 Extend Operand::extend()
const {
379 DCHECK(IsExtendedRegister());
// shift_amount_ is shared by the shifted and extended register forms.
384 unsigned Operand::shift_amount()
const {
385 DCHECK(IsShiftedRegister() || IsExtendedRegister());
386 return shift_amount_;
// Produces an operand that arithmetic-shifts the Smi tag out of |smi|.
390 Operand Operand::UntagSmi(Register smi) {
391 DCHECK(smi.Is64Bits());
392 DCHECK(SmiValuesAre32Bits() || SmiValuesAre31Bits());
393 return Operand(smi, ASR, kSmiShift);
// Untags |smi| and scales by 2^scale with a single net shift: LSL when the
// scale exceeds the tag shift, ASR when it is smaller.
397 Operand Operand::UntagSmiAndScale(Register smi,
int scale) {
398 DCHECK(smi.Is64Bits());
399 DCHECK((scale >= 0) && (scale <= (64 - kSmiValueSize)));
400 DCHECK(SmiValuesAre32Bits() || SmiValuesAre31Bits());
401 if (scale > kSmiShift) {
402 return Operand(smi, LSL, scale - kSmiShift);
403 }
else if (scale < kSmiShift) {
404 return Operand(smi, ASR, kSmiShift - scale);
// Default MemOperand: [NoReg, #0] with no shift or extend.
410 MemOperand::MemOperand()
411 : base_(NoReg), regoffset_(NoReg), offset_(0), addrmode_(Offset),
412 shift_(NO_SHIFT), extend_(NO_EXTEND), shift_amount_(0) {
// Immediate-offset form: [base, #offset] with the given addressing mode.
// The base must be a 64-bit register and may not be the zero register.
416 MemOperand::MemOperand(Register base,
int64_t offset, AddrMode addrmode)
417 : base_(base), regoffset_(NoReg), offset_(offset), addrmode_(addrmode),
418 shift_(NO_SHIFT), extend_(NO_EXTEND), shift_amount_(0) {
419 DCHECK(base.Is64Bits() && !base.IsZero());
// Register-offset form with extend: [base, regoffset, extend #amount].
423 MemOperand::MemOperand(Register base,
426 unsigned shift_amount)
427 : base_(base), regoffset_(regoffset), offset_(0), addrmode_(Offset),
428 shift_(NO_SHIFT), extend_(extend), shift_amount_(shift_amount) {
429 DCHECK(base.Is64Bits() && !base.IsZero());
430 DCHECK(!regoffset.IsSP());
431 DCHECK((extend == UXTW) || (extend == SXTW) || (extend == SXTX));
// SXTX applies only to 64-bit offset registers.
434 DCHECK(regoffset.Is64Bits() || (extend != SXTX));
// Register-offset form with shift: [base, regoffset, LSL #amount].
438 MemOperand::MemOperand(Register base,
441 unsigned shift_amount)
442 : base_(base), regoffset_(regoffset), offset_(0), addrmode_(Offset),
443 shift_(shift), extend_(NO_EXTEND), shift_amount_(shift_amount) {
444 DCHECK(base.Is64Bits() && !base.IsZero());
445 DCHECK(regoffset.Is64Bits() && !regoffset.IsSP());
446 DCHECK(shift == LSL);
// Operand form: decomposes an Operand (immediate, shifted register, or
// extended register) into the corresponding MemOperand fields.
449 MemOperand::MemOperand(Register base,
const Operand& offset, AddrMode addrmode)
450 : base_(base), regoffset_(NoReg), addrmode_(addrmode) {
451 DCHECK(base.Is64Bits() && !base.IsZero());
453 if (offset.IsImmediate()) {
454 offset_ = offset.ImmediateValue();
455 }
else if (offset.IsShiftedRegister()) {
456 DCHECK((addrmode == Offset) || (addrmode == PostIndex));
458 regoffset_ = offset.reg();
459 shift_ = offset.shift();
460 shift_amount_ = offset.shift_amount();
466 DCHECK(regoffset_.Is64Bits() && !regoffset_.IsSP());
467 DCHECK(shift_ == LSL);
469 DCHECK(offset.IsExtendedRegister());
470 DCHECK(addrmode == Offset);
472 regoffset_ = offset.reg();
473 extend_ = offset.extend();
474 shift_amount_ = offset.shift_amount();
480 DCHECK(!regoffset_.IsSP());
481 DCHECK((extend_ == UXTW) || (extend_ == SXTW) || (extend_ == SXTX));
482 DCHECK((regoffset_.Is64Bits() || (extend_ != SXTX)));
// Addressing-mode predicates.
486 bool MemOperand::IsImmediateOffset()
const {
487 return (addrmode_ == Offset) && regoffset_.Is(NoReg);
491 bool MemOperand::IsRegisterOffset()
const {
492 return (addrmode_ == Offset) && !regoffset_.Is(NoReg);
496 bool MemOperand::IsPreIndex()
const {
497 return addrmode_ == PreIndex;
501 bool MemOperand::IsPostIndex()
const {
502 return addrmode_ == PostIndex;
// Re-packages the offset part (immediate or register) as an Operand.
505 Operand MemOperand::OffsetAsOperand()
const {
506 if (IsImmediateOffset()) {
509 DCHECK(IsRegisterOffset());
510 if (extend() == NO_EXTEND) {
511 return Operand(regoffset(), shift(), shift_amount());
513 return Operand(regoffset(), extend(), shift_amount());
// Emits a breakpoint that should never be reached.
519 void Assembler::Unreachable() {
521 debug(
"UNREACHABLE", __LINE__, BREAK);
// For an LDR-literal-X instruction at |pc|, returns the address of the
// constant-pool slot it loads from.
529 Address Assembler::target_pointer_address_at(Address pc) {
530 Instruction* instr =
reinterpret_cast<Instruction*
>(pc);
531 DCHECK(instr->IsLdrLiteralX());
532 return reinterpret_cast<Address
>(instr->ImmPCOffsetTarget());
// Reads the call/jump target at |pc|: either an address stored in the
// constant pool (LDR-literal) or the pc-relative target of a branch.
537 Address Assembler::target_address_at(Address pc, Address constant_pool) {
538 Instruction* instr =
reinterpret_cast<Instruction*
>(pc);
539 if (instr->IsLdrLiteralX()) {
540 return Memory<Address>(target_pointer_address_at(pc));
542 DCHECK(instr->IsBranchAndLink() || instr->IsUnconditionalBranch());
543 return reinterpret_cast<Address
>(instr->ImmPCOffsetTarget());
// Returns the Code handle referenced at |pc|: via the constant pool for
// LDR-literal, or via the assembler's code-target table for branches.
547 Handle<Code> Assembler::code_target_object_handle_at(Address pc) {
548 Instruction* instr =
reinterpret_cast<Instruction*
>(pc);
549 if (instr->IsLdrLiteralX()) {
550 return Handle<Code>(
reinterpret_cast<Address*
>(
551 Assembler::target_address_at(pc, 0 )));
553 DCHECK(instr->IsBranchAndLink() || instr->IsUnconditionalBranch());
// Branch offsets index the code-target table in instruction units.
554 DCHECK_EQ(instr->ImmPCOffset() % kInstrSize, 0);
555 return GetCodeTarget(instr->ImmPCOffset() >> kInstrSizeLog2);
// Runtime-entry address at |pc|: from the pool, or the branch offset added
// to the code range start.
559 Address Assembler::runtime_entry_at(Address pc) {
560 Instruction* instr =
reinterpret_cast<Instruction*
>(pc);
561 if (instr->IsLdrLiteralX()) {
562 return Assembler::target_address_at(pc, 0 );
564 DCHECK(instr->IsBranchAndLink() || instr->IsUnconditionalBranch());
565 return instr->ImmPCOffset() + options().code_range_start;
// Steps back two instructions from the return address to locate the
// LDR-literal that holds the call target.
569 Address Assembler::target_address_from_return_address(Address pc) {
575 Address candidate = pc - 2 * kInstrSize;
576 Instruction* instr =
reinterpret_cast<Instruction*
>(candidate);
578 DCHECK(instr->IsLdrLiteralX());
// Size of a serialized special target: branches use kSpecialTargetSize;
// otherwise the slot is an inline pointer (instruction bits must be zero).
582 int Assembler::deserialization_special_target_size(Address location) {
583 Instruction* instr =
reinterpret_cast<Instruction*
>(location);
584 if (instr->IsBranchAndLink() || instr->IsUnconditionalBranch()) {
585 return kSpecialTargetSize;
587 DCHECK_EQ(instr->InstructionBits(), 0);
// Patches a special target during deserialization: retarget the branch and
// flush the icache, or store the pointer directly for the inline case.
592 void Assembler::deserialization_set_special_target_at(Address location,
595 Instruction* instr =
reinterpret_cast<Instruction*
>(location);
596 if (instr->IsBranchAndLink() || instr->IsUnconditionalBranch()) {
602 instr->SetBranchImmTarget(reinterpret_cast<Instruction*>(target));
603 Assembler::FlushICache(location, kInstrSize);
605 DCHECK_EQ(instr->InstructionBits(), 0);
606 Memory<Address>(location) = target;
// Internal references are plain data pointers; no icache flush needed.
615 void Assembler::deserialization_set_target_internal_reference_at(
616 Address pc, Address target, RelocInfo::Mode mode) {
617 Memory<Address>(pc) = target;
// Rewrites the target at |pc|: for LDR-literal, update the constant-pool
// slot (data only); for branches, retarget the instruction and flush the
// icache unless explicitly skipped.
620 void Assembler::set_target_address_at(Address pc, Address constant_pool,
622 ICacheFlushMode icache_flush_mode) {
623 Instruction* instr =
reinterpret_cast<Instruction*
>(pc);
624 if (instr->IsLdrLiteralX()) {
625 Memory<Address>(target_pointer_address_at(pc)) = target;
632 DCHECK(instr->IsBranchAndLink() || instr->IsUnconditionalBranch());
638 instr->SetBranchImmTarget(reinterpret_cast<Instruction*>(target));
639 if (icache_flush_mode != SKIP_ICACHE_FLUSH) {
640 Assembler::FlushICache(pc, kInstrSize);
// Size of the serialized target for this relocation entry.
645 int RelocInfo::target_address_size() {
646 if (IsCodedSpecially()) {
647 return Assembler::kSpecialTargetSize;
649 DCHECK(reinterpret_cast<Instruction*>(pc_)->IsLdrLiteralX());
// The absolute target address of this relocation entry.
655 Address RelocInfo::target_address() {
656 DCHECK(IsCodeTarget(rmode_) || IsRuntimeEntry(rmode_) || IsWasmCall(rmode_));
657 return Assembler::target_address_at(pc_, constant_pool_);
// The address of the slot holding the target (a pool entry when the
// instruction is an LDR-literal).
660 Address RelocInfo::target_address_address() {
661 DCHECK(IsCodeTarget(rmode_) || IsRuntimeEntry(rmode_) || IsWasmCall(rmode_) ||
662 IsEmbeddedObject(rmode_) || IsExternalReference(rmode_) ||
663 IsOffHeapTarget(rmode_));
664 Instruction* instr =
reinterpret_cast<Instruction*
>(pc_);
678 if (instr->IsLdrLiteralX()) {
679 return constant_pool_entry_address();
681 DCHECK(instr->IsBranchAndLink() || instr->IsUnconditionalBranch());
// Constant-pool slot address for in-pool targets.
687 Address RelocInfo::constant_pool_entry_address() {
688 DCHECK(IsInConstantPool());
689 return Assembler::target_pointer_address_at(pc_);
// The HeapObject this entry points at.
692 HeapObject* RelocInfo::target_object() {
693 DCHECK(IsCodeTarget(rmode_) || rmode_ == EMBEDDED_OBJECT);
694 return HeapObject::cast(reinterpret_cast<Object*>(
695 Assembler::target_address_at(pc_, constant_pool_)));
// Handle view of the target object; code targets resolve through the
// originating assembler's code-target table.
698 Handle<HeapObject> RelocInfo::target_object_handle(Assembler* origin) {
699 if (rmode_ == EMBEDDED_OBJECT) {
700 return Handle<HeapObject>(
reinterpret_cast<Address*
>(
701 Assembler::target_address_at(pc_, constant_pool_)));
703 DCHECK(IsCodeTarget(rmode_));
704 return origin->code_target_object_handle_at(pc_);
// Updates the target object, honoring the write-barrier and icache modes.
708 void RelocInfo::set_target_object(Heap* heap, HeapObject* target,
709 WriteBarrierMode write_barrier_mode,
710 ICacheFlushMode icache_flush_mode) {
711 DCHECK(IsCodeTarget(rmode_) || rmode_ == EMBEDDED_OBJECT);
712 Assembler::set_target_address_at(pc_, constant_pool_,
713 reinterpret_cast<Address>(target),
715 if (write_barrier_mode == UPDATE_WRITE_BARRIER && host() !=
nullptr) {
716 WriteBarrierForCode(host(),
this, target);
// External references share the generic target-address plumbing.
721 Address RelocInfo::target_external_reference() {
722 DCHECK(rmode_ == EXTERNAL_REFERENCE);
723 return Assembler::target_address_at(pc_, constant_pool_);
726 void RelocInfo::set_target_external_reference(
727 Address target, ICacheFlushMode icache_flush_mode) {
728 DCHECK(rmode_ == RelocInfo::EXTERNAL_REFERENCE);
729 Assembler::set_target_address_at(pc_, constant_pool_, target,
// Internal references are stored as raw pointers at pc_.
733 Address RelocInfo::target_internal_reference() {
734 DCHECK(rmode_ == INTERNAL_REFERENCE);
735 return Memory<Address>(pc_);
739 Address RelocInfo::target_internal_reference_address() {
740 DCHECK(rmode_ == INTERNAL_REFERENCE);
// Runtime entries resolve through the assembler that emitted them.
744 Address RelocInfo::target_runtime_entry(Assembler* origin) {
745 DCHECK(IsRuntimeEntry(rmode_));
746 return origin->runtime_entry_at(pc_);
749 void RelocInfo::set_target_runtime_entry(Address target,
750 WriteBarrierMode write_barrier_mode,
751 ICacheFlushMode icache_flush_mode) {
752 DCHECK(IsRuntimeEntry(rmode_));
// Skip redundant patching (and icache flushes) when unchanged.
753 if (target_address() != target) {
754 set_target_address(target, write_barrier_mode, icache_flush_mode);
758 Address RelocInfo::target_off_heap_target() {
759 DCHECK(IsOffHeapTarget(rmode_));
760 return Assembler::target_address_at(pc_, constant_pool_);
// Clears the target: internal references become null pointers; other kinds
// are retargeted to kNullAddress.
763 void RelocInfo::WipeOut() {
764 DCHECK(IsEmbeddedObject(rmode_) || IsCodeTarget(rmode_) ||
765 IsRuntimeEntry(rmode_) || IsExternalReference(rmode_) ||
766 IsInternalReference(rmode_) || IsOffHeapTarget(rmode_));
767 if (IsInternalReference(rmode_)) {
768 Memory<Address>(pc_) = kNullAddress;
770 Assembler::set_target_address_at(pc_, constant_pool_, kNullAddress);
// Dispatches this relocation entry to the matching ObjectVisitor callback
// based on its mode.
774 template <
typename ObjectVisitor>
775 void RelocInfo::Visit(ObjectVisitor* visitor) {
776 RelocInfo::Mode mode = rmode();
777 if (mode == RelocInfo::EMBEDDED_OBJECT) {
778 visitor->VisitEmbeddedPointer(host(),
this);
779 }
else if (RelocInfo::IsCodeTargetMode(mode)) {
780 visitor->VisitCodeTarget(host(),
this);
781 }
else if (mode == RelocInfo::EXTERNAL_REFERENCE) {
782 visitor->VisitExternalReference(host(),
this);
783 }
else if (mode == RelocInfo::INTERNAL_REFERENCE) {
784 visitor->VisitInternalReference(host(),
this);
785 }
else if (RelocInfo::IsRuntimeEntry(mode)) {
786 visitor->VisitRuntimeEntry(host(),
this);
787 }
else if (RelocInfo::IsOffHeapTarget(mode)) {
788 visitor->VisitOffHeapTarget(host(),
this);
// Selects the LDR variant for |rt| based on its kind and width. (The
// vector-width case bodies are truncated in this extract.)
792 LoadStoreOp Assembler::LoadOpFor(
const CPURegister& rt) {
793 DCHECK(rt.IsValid());
794 if (rt.IsRegister()) {
795 return rt.Is64Bits() ? LDR_x : LDR_w;
797 DCHECK(rt.IsVRegister());
798 switch (rt.SizeInBits()) {
799 case kBRegSizeInBits:
801 case kHRegSizeInBits:
803 case kSRegSizeInBits:
805 case kDRegSizeInBits:
// STR counterpart of LoadOpFor.
815 LoadStoreOp Assembler::StoreOpFor(
const CPURegister& rt) {
816 DCHECK(rt.IsValid());
817 if (rt.IsRegister()) {
818 return rt.Is64Bits() ? STR_x : STR_w;
820 DCHECK(rt.IsVRegister());
821 switch (rt.SizeInBits()) {
822 case kBRegSizeInBits:
824 case kHRegSizeInBits:
826 case kSRegSizeInBits:
828 case kDRegSizeInBits:
// LDP is derived from the STP encoding by setting the load bit.
837 LoadStorePairOp Assembler::LoadPairOpFor(
const CPURegister& rt,
838 const CPURegister& rt2) {
839 DCHECK_EQ(STP_w | LoadStorePairLBit, LDP_w);
840 return static_cast<LoadStorePairOp
>(StorePairOpFor(rt, rt2) |
// Selects the STP variant for a same-size/type register pair.
844 LoadStorePairOp Assembler::StorePairOpFor(
const CPURegister& rt,
845 const CPURegister& rt2) {
846 DCHECK(AreSameSizeAndType(rt, rt2));
848 if (rt.IsRegister()) {
849 return rt.Is64Bits() ? STP_x : STP_w;
851 DCHECK(rt.IsVRegister());
852 switch (rt.SizeInBits()) {
853 case kSRegSizeInBits:
855 case kDRegSizeInBits:
// PC-relative literal-load opcode for |rt|.
865 LoadLiteralOp Assembler::LoadLiteralOpFor(
const CPURegister& rt) {
866 if (rt.IsRegister()) {
867 return rt.Is64Bits() ? LDR_x_lit : LDR_w_lit;
869 DCHECK(rt.IsVRegister());
870 return rt.Is64Bits() ? LDR_d_lit : LDR_s_lit;
// Links |label| into its chain and returns the pc offset in instruction
// units (byte offset divided by the instruction size).
875 int Assembler::LinkAndGetInstructionOffsetTo(Label* label) {
876 DCHECK_EQ(kStartOfLabelLinkChain, 0);
877 int offset = LinkAndGetByteOffsetTo(label);
878 DCHECK(IsAligned(offset, kInstrSize));
879 return offset >> kInstrSizeLog2;
// Encodes the flags-update (S) bit.
883 Instr Assembler::Flags(FlagsUpdate S) {
885 return 1 << FlagsUpdate_offset;
886 }
else if (S == LeaveFlags) {
887 return 0 << FlagsUpdate_offset;
// Encodes a condition code into its instruction field.
893 Instr Assembler::Cond(Condition cond) {
894 return cond << Condition_offset;
// The Imm* helpers below range-check an immediate and shift it into its
// instruction field. CHECK (not DCHECK) is used where the value may come
// from unvalidated branch offsets.
898 Instr Assembler::ImmPCRelAddress(
int imm21) {
899 CHECK(is_int21(imm21));
900 Instr imm =
static_cast<Instr
>(truncate_to_int21(imm21));
// ADR/ADRP split the 21-bit immediate into low and high fields.
901 Instr immhi = (imm >> ImmPCRelLo_width) << ImmPCRelHi_offset;
902 Instr immlo = imm << ImmPCRelLo_offset;
903 return (immhi & ImmPCRelHi_mask) | (immlo & ImmPCRelLo_mask);
907 Instr Assembler::ImmUncondBranch(
int imm26) {
908 CHECK(is_int26(imm26));
909 return truncate_to_int26(imm26) << ImmUncondBranch_offset;
913 Instr Assembler::ImmCondBranch(
int imm19) {
914 CHECK(is_int19(imm19));
915 return truncate_to_int19(imm19) << ImmCondBranch_offset;
919 Instr Assembler::ImmCmpBranch(
int imm19) {
920 CHECK(is_int19(imm19));
921 return truncate_to_int19(imm19) << ImmCmpBranch_offset;
925 Instr Assembler::ImmTestBranch(
int imm14) {
926 CHECK(is_int14(imm14));
927 return truncate_to_int14(imm14) << ImmTestBranch_offset;
// TBZ/TBNZ encode the tested bit position split across b5 and b40 fields.
931 Instr Assembler::ImmTestBranchBit(
unsigned bit_pos) {
932 DCHECK(is_uint6(bit_pos));
934 unsigned b5 = bit_pos << (ImmTestBranchBit5_offset - 5);
935 unsigned b40 = bit_pos << ImmTestBranchBit40_offset;
936 b5 &= ImmTestBranchBit5_mask;
937 b40 &= ImmTestBranchBit40_mask;
// Size flag: selects the 64-bit or 32-bit form of an instruction.
942 Instr Assembler::SF(Register rd) {
943 return rd.Is64Bits() ? SixtyFourBits : ThirtyTwoBits;
// Add/sub immediate; values wider than 12 bits use the shifted form.
947 Instr Assembler::ImmAddSub(
int imm) {
948 DCHECK(IsImmAddSub(imm));
949 if (is_uint12(imm)) {
950 imm <<= ImmAddSub_offset;
952 imm = ((imm >> 12) << ImmAddSub_offset) | (1 << ShiftAddSub_offset);
// Bitfield immediates: allowed widths depend on the register size.
958 Instr Assembler::ImmS(
unsigned imms,
unsigned reg_size) {
959 DCHECK(((reg_size == kXRegSizeInBits) && is_uint6(imms)) ||
960 ((reg_size == kWRegSizeInBits) && is_uint5(imms)));
962 return imms << ImmS_offset;
966 Instr Assembler::ImmR(
unsigned immr,
unsigned reg_size) {
967 DCHECK(((reg_size == kXRegSizeInBits) && is_uint6(immr)) ||
968 ((reg_size == kWRegSizeInBits) && is_uint5(immr)));
970 DCHECK(is_uint6(immr));
971 return immr << ImmR_offset;
975 Instr Assembler::ImmSetBits(
unsigned imms,
unsigned reg_size) {
976 DCHECK((reg_size == kWRegSizeInBits) || (reg_size == kXRegSizeInBits));
977 DCHECK(is_uint6(imms));
978 DCHECK((reg_size == kXRegSizeInBits) || is_uint6(imms + 3));
980 return imms << ImmSetBits_offset;
984 Instr Assembler::ImmRotate(
unsigned immr,
unsigned reg_size) {
985 DCHECK((reg_size == kWRegSizeInBits) || (reg_size == kXRegSizeInBits));
986 DCHECK(((reg_size == kXRegSizeInBits) && is_uint6(immr)) ||
987 ((reg_size == kWRegSizeInBits) && is_uint5(immr)));
989 return immr << ImmRotate_offset;
// 19-bit literal-load offset.
993 Instr Assembler::ImmLLiteral(
int imm19) {
994 CHECK(is_int19(imm19));
995 return truncate_to_int19(imm19) << ImmLLiteral_offset;
// N bit of logical-immediate encodings; only non-zero for X registers.
999 Instr Assembler::BitN(
unsigned bitn,
unsigned reg_size) {
1000 DCHECK((reg_size == kWRegSizeInBits) || (reg_size == kXRegSizeInBits));
1001 DCHECK((reg_size == kXRegSizeInBits) || (bitn == 0));
1003 return bitn << BitN_offset;
// Data-processing shift type and amount fields.
1007 Instr Assembler::ShiftDP(Shift shift) {
1008 DCHECK(shift == LSL || shift == LSR || shift == ASR || shift == ROR);
1009 return shift << ShiftDP_offset;
1013 Instr Assembler::ImmDPShift(
unsigned amount) {
1014 DCHECK(is_uint6(amount));
1015 return amount << ImmDPShift_offset;
// Extend mode and its left-shift (0..4) for extended-register operands.
1019 Instr Assembler::ExtendMode(Extend extend) {
1020 return extend << ExtendMode_offset;
1024 Instr Assembler::ImmExtendShift(
unsigned left_shift) {
1025 DCHECK_LE(left_shift, 4);
1026 return left_shift << ImmExtendShift_offset;
// Conditional-compare immediate (5 bits).
1030 Instr Assembler::ImmCondCmp(
unsigned imm) {
1031 DCHECK(is_uint5(imm));
1032 return imm << ImmCondCmp_offset;
// NZCV flags field for conditional compare.
1036 Instr Assembler::Nzcv(StatusFlags nzcv) {
1037 return ((nzcv >> Flags_offset) & 0xf) << Nzcv_offset;
// Load/store immediates: 12-bit unsigned, 9-bit signed, and the 7-bit
// pair offset which is pre-scaled down by the access size.
1041 Instr Assembler::ImmLSUnsigned(
int imm12) {
1042 DCHECK(is_uint12(imm12));
1043 return imm12 << ImmLSUnsigned_offset;
1047 Instr Assembler::ImmLS(
int imm9) {
1048 DCHECK(is_int9(imm9));
1049 return truncate_to_int9(imm9) << ImmLS_offset;
1052 Instr Assembler::ImmLSPair(
int imm7,
unsigned size) {
1053 DCHECK_EQ((imm7 >> size) << size, imm7);
1054 int scaled_imm7 = imm7 >> size;
1055 DCHECK(is_int7(scaled_imm7));
1056 return truncate_to_int7(scaled_imm7) << ImmLSPair_offset;
1060 Instr Assembler::ImmShiftLS(
unsigned shift_amount) {
1061 DCHECK(is_uint1(shift_amount));
1062 return shift_amount << ImmShiftLS_offset;
// Exception / system-instruction immediates.
1066 Instr Assembler::ImmException(
int imm16) {
1067 DCHECK(is_uint16(imm16));
1068 return imm16 << ImmException_offset;
1072 Instr Assembler::ImmSystemRegister(
int imm15) {
1073 DCHECK(is_uint15(imm15));
1074 return imm15 << ImmSystemRegister_offset;
1078 Instr Assembler::ImmHint(
int imm7) {
1079 DCHECK(is_uint7(imm7));
1080 return imm7 << ImmHint_offset;
1084 Instr Assembler::ImmBarrierDomain(
int imm2) {
1085 DCHECK(is_uint2(imm2));
1086 return imm2 << ImmBarrierDomain_offset;
1090 Instr Assembler::ImmBarrierType(
int imm2) {
1091 DCHECK(is_uint2(imm2));
1092 return imm2 << ImmBarrierType_offset;
// Decodes the log2 data size of a load/store op from its size bits; vector
// ops with opc >= 2 at size 0 are Q-register sized.
1095 unsigned Assembler::CalcLSDataSize(LoadStoreOp op) {
1096 DCHECK((LSSize_offset + LSSize_width) == (kInstrSize * 8));
1097 unsigned size =
static_cast<Instr
>(op >> LSSize_offset);
1098 if ((op & LSVector_mask) != 0) {
1101 if ((size == 0) && ((op & LSOpc_mask) >> LSOpc_offset) >= 2) {
1102 size = kQRegSizeLog2;
// MOVZ/MOVK/MOVN 16-bit immediate and its 16-bit-chunk shift selector.
1109 Instr Assembler::ImmMoveWide(
int imm) {
1110 DCHECK(is_uint16(imm));
1111 return imm << ImmMoveWide_offset;
1115 Instr Assembler::ShiftMoveWide(
int shift) {
1116 DCHECK(is_uint2(shift));
1117 return shift << ShiftMoveWide_offset;
1120 Instr Assembler::FPType(VRegister fd) {
return fd.Is64Bits() ? FP64 : FP32; }
// Fixed-point scale field for FP<->integer conversions.
1122 Instr Assembler::FPScale(
unsigned scale) {
1123 DCHECK(is_uint6(scale));
1124 return scale << FPScale_offset;
// Returns xzr or wzr, matching |reg|'s width.
1128 const Register& Assembler::AppropriateZeroRegFor(
const CPURegister& reg)
const {
1129 return reg.Is64Bits() ? xzr : wzr;
// Grows the buffer when remaining space drops below the safety gap.
1133 inline void Assembler::CheckBufferSpace() {
1134 DCHECK(pc_ < (buffer_ + buffer_size_));
1135 if (buffer_space() < kGap) {
// Periodic emission checks for the veneer and constant pools, triggered
// once pc_offset passes the scheduled check positions.
1141 inline void Assembler::CheckBuffer() {
1143 if (pc_offset() >= next_veneer_pool_check_) {
1144 CheckVeneerPool(
false,
true);
1146 if (pc_offset() >= next_constant_pool_check_) {
1147 CheckConstPool(
false,
true);
1154 #endif // V8_ARM64_ASSEMBLER_ARM64_INL_H_