5 #ifndef V8_ARM64_INSTRUCTIONS_ARM64_H_ 6 #define V8_ARM64_INSTRUCTIONS_ARM64_H_ 8 #include "src/arm64/constants-arm64.h" 9 #include "src/arm64/utils-arm64.h" 10 #include "src/globals.h" 11 #include "src/utils.h" 16 struct AssemblerOptions;
// Floating-point constants shared by the assembler, disassembler and
// simulator. Definitions live in the corresponding .cc file; the
// #if V8_OS_WIN fragment presumably guards a dllexport/import variant —
// the rest of that preprocessor block was lost in extraction.
// Positive/negative infinities for half, single and double precision.
22 #if defined(V8_OS_WIN) 26 extern const float16 kFP16PositiveInfinity;
27 extern const float16 kFP16NegativeInfinity;
28 extern const float kFP32PositiveInfinity;
29 extern const float kFP32NegativeInfinity;
30 extern const double kFP64PositiveInfinity;
31 extern const double kFP64NegativeInfinity;
// Signalling NaNs (raise an FP exception when consumed).
35 extern const double kFP64SignallingNaN;
36 extern const float kFP32SignallingNaN;
// Quiet NaNs (propagate without trapping).
39 extern const double kFP64QuietNaN;
40 extern const float kFP32QuietNaN;
// The default NaN values produced by the FPU when DN (default-NaN) mode
// is enabled.
43 extern const double kFP64DefaultNaN;
44 extern const float kFP32DefaultNaN;
45 extern const float16 kFP16DefaultNaN;
// Return the access size implied by a load/store (pair) opcode, used by
// SizeLS()/SizeLSPair() below.
47 #if defined(V8_OS_WIN) 51 unsigned CalcLSDataSize(LoadStoreOp op);
52 unsigned CalcLSPairDataSize(LoadStorePairOp op);
// Fragment of the ImmBranchType enum: classifies immediate-branch
// families. Values 1 and 2 (presumably CondBranchType and
// UncondBranchType, per their use in BranchType() below) were lost in
// extraction.
55 UnknownBranchType = 0,
58 CompareBranchType = 3,
// Fragment of an FP classification enum; infinity kinds encoded as
// distinct bit flags.
71 FPPositiveInfinity = 0x1,
72 FPNegativeInfinity = 0x2,
// Raw access to the 32-bit instruction word. An Instruction* is simply
// a pointer into the code stream, so the bits are read/written by
// reinterpreting `this`.
90 V8_INLINE Instr InstructionBits()
const {
91 return *
reinterpret_cast<const Instr*
>(
this);
// Overwrite the instruction word in place (used when patching code).
94 V8_INLINE
void SetInstructionBits(Instr new_instr) {
95 *
reinterpret_cast<Instr*
>(
this) = new_instr;
// Extract the single bit at `pos` (0 or 1).
98 int Bit(
int pos)
const {
99 return (InstructionBits() >> pos) & 1;
// Extract the unsigned bit-field [msb:lsb], inclusive.
102 uint32_t Bits(
int msb,
int lsb)
const {
103 return unsigned_bitextract_32(msb, lsb, InstructionBits());
// Extract the bit-field [msb:lsb] and sign-extend it.
106 int32_t SignedBits(
int msb,
int lsb)
const {
107 int32_t bits = *(
reinterpret_cast<const int32_t*
>(
this));
108 return signed_bitextract_32(msb, lsb, bits);
// Body of Mask(uint32_t mask): AND the instruction word with `mask` —
// the signature line was lost in extraction.
112 return InstructionBits() & mask;
// Pointer arithmetic over the instruction stream: following(n) returns
// the instruction n slots after this one (const and non-const
// overloads), preceding(n) the one n slots before, implemented as
// following(-n).
115 V8_INLINE
const Instruction* following(
int count = 1)
const {
116 return InstructionAtOffset(count * static_cast<int>(kInstrSize));
119 V8_INLINE Instruction* following(
int count = 1) {
120 return InstructionAtOffset(count * static_cast<int>(kInstrSize));
123 V8_INLINE
const Instruction* preceding(
int count = 1)
const {
124 return following(-count);
127 V8_INLINE Instruction* preceding(
int count = 1) {
128 return following(-count);
// Generate one accessor per instruction field: Name() extracts the
// field via Bits()/SignedBits() over [HighBit:LowBit], as listed in
// INSTRUCTION_FIELDS_LIST (defined in constants-arm64.h).
131 #define DEFINE_GETTER(Name, HighBit, LowBit, Func) \ 132 int32_t Name() const { return Func(HighBit, LowBit); } 133 INSTRUCTION_FIELDS_LIST(DEFINE_GETTER)
// ADR/ADRP immediate: the PC-relative offset is split across the
// ImmPCRelHi and ImmPCRelLo fields; glue them together and sign-extend
// to the combined width.
138 int ImmPCRel()
const {
139 DCHECK(IsPCRelAddressing());
140 int offset = ((ImmPCRelHi() << ImmPCRelLo_width) | ImmPCRelLo());
141 int width = ImmPCRelLo_width + ImmPCRelHi_width;
142 return signed_bitextract_32(width - 1, 0, offset);
// Decoders for logical-immediate and NEON-immediate encodings
// (implemented in the .cc file).
145 uint64_t ImmLogical();
146 unsigned ImmNEONabcdefgh()
const;
149 float ImmNEONFP32()
const;
150 double ImmNEONFP64()
const;
// Access size (log2 bytes) of this load/store, derived from the opcode.
152 unsigned SizeLS()
const {
153 return CalcLSDataSize(static_cast<LoadStoreOp>(Mask(LoadStoreMask)));
156 unsigned SizeLSPair()
const {
157 return CalcLSPairDataSize(
158 static_cast<LoadStorePairOp>(Mask(LoadStorePairMask)));
// Lane index for NEON load/store single-structure forms. `q` and `s`
// are read from instruction fields on lines lost in extraction
// (presumably NEONQ()/NEONS()) — verify against the full file.
161 int NEONLSIndex(
int access_size_shift)
const {
164 int size = NEONLSSize();
165 int index = (q << 3) | (s << 2) | size;
166 return index >> access_size_shift;
// Predicates classifying immediate branches by matching the fixed bits
// of each encoding class: B.cond, B/BL, CBZ/CBNZ, TBZ/TBNZ.
170 bool IsCondBranchImm()
const {
171 return Mask(ConditionalBranchFMask) == ConditionalBranchFixed;
174 bool IsUncondBranchImm()
const {
175 return Mask(UnconditionalBranchFMask) == UnconditionalBranchFixed;
178 bool IsCompareBranch()
const {
179 return Mask(CompareBranchFMask) == CompareBranchFixed;
182 bool IsTestBranch()
const {
183 return Mask(TestBranchFMask) == TestBranchFixed;
// Any of the above: BranchType() is UnknownBranchType for
// non-immediate-branch instructions.
186 bool IsImmBranch()
const {
187 return BranchType() != UnknownBranchType;
190 static float Imm8ToFP32(
uint32_t imm8) {
198 uint32_t result = (bit7 << 31) | ((32 - bit6) << 25) | (bit5_to_0 << 19);
200 return bit_cast<
float>(result);
203 static double Imm8ToFP64(
uint32_t imm8) {
209 uint64_t bit7 = (bits >> 7) & 0x1;
210 uint64_t bit6 = (bits >> 6) & 0x1;
211 uint64_t bit5_to_0 = bits & 0x3f;
212 uint64_t result = (bit7 << 63) | ((256 - bit6) << 54) | (bit5_to_0 << 48);
214 return bit_cast<
double>(result);
// Predicates for literal loads: any LDR (literal), and specifically the
// 64-bit LDR Xt, <label> form.
217 bool IsLdrLiteral()
const {
218 return Mask(LoadLiteralFMask) == LoadLiteralFixed;
221 bool IsLdrLiteralX()
const {
222 return Mask(LoadLiteralMask) == LDR_x_lit;
// PC-relative addressing (ADR/ADRP). The bare return on the next
// fragment is the body of a stricter predicate (presumably IsAdr();
// its signature line was lost in extraction).
225 bool IsPCRelAddressing()
const {
226 return Mask(PCRelAddressingFMask) == PCRelAddressingFixed;
230 return Mask(PCRelAddressingMask) == ADR;
233 bool IsBrk()
const {
return Mask(ExceptionMask) == BRK; }
// Unresolved internal references are emitted as two consecutive BRK
// instructions whose immediates hold the two halves of the offset (see
// ImmUnresolvedInternalReference below).
235 bool IsUnresolvedInternalReference()
const {
238 return IsBrk() && following()->IsBrk();
// Encoding-class predicates: logical-immediate, and the three add/sub
// operand forms (immediate, shifted register, extended register).
241 bool IsLogicalImmediate()
const {
242 return Mask(LogicalImmediateFMask) == LogicalImmediateFixed;
245 bool IsAddSubImmediate()
const {
246 return Mask(AddSubImmediateFMask) == AddSubImmediateFixed;
249 bool IsAddSubShifted()
const {
250 return Mask(AddSubShiftedFMask) == AddSubShiftedFixed;
253 bool IsAddSubExtended()
const {
254 return Mask(AddSubExtendedFMask) == AddSubExtendedFixed;
258 bool IsLoadOrStore()
const {
259 return Mask(LoadStoreAnyFMask) == LoadStoreAnyFixed;
// Finer load/store classification; implemented in the .cc file.
265 bool IsStore()
const;
// Register 31 is context-dependent on ARM64: it names either the zero
// register (xzr/wzr) or the stack pointer (sp). RdMode() reports which
// interpretation applies to this instruction's destination register:
// flag-setting add/sub and ANDS write xzr, their non-flag-setting
// counterparts write sp; everything else treats Rd == 31 as xzr.
269 Reg31Mode RdMode()
const {
275 if (IsAddSubImmediate() || IsAddSubExtended()) {
276 if (Mask(AddSubSetFlagsBit)) {
277 return Reg31IsZeroRegister;
279 return Reg31IsStackPointer;
282 if (IsLogicalImmediate()) {
// Of the logical-immediate ops only ANDS sets flags, so only ANDS
// reads Rd == 31 as the zero register.
287 if (Mask(LogicalImmediateMask & LogicalOpMask) == ANDS) {
288 return Reg31IsZeroRegister;
290 return Reg31IsStackPointer;
293 return Reg31IsZeroRegister;
// Same question for the first source register Rn: loads/stores and
// add/sub (immediate or extended) address off sp; all other encodings
// read Rn == 31 as the zero register.
298 Reg31Mode RnMode()
const {
304 if (IsLoadOrStore() || IsAddSubImmediate() || IsAddSubExtended()) {
305 return Reg31IsStackPointer;
307 return Reg31IsZeroRegister;
// Classify this instruction into one of the immediate-branch families,
// or UnknownBranchType if it is not an immediate branch. Drives
// ImmBranch() and the branch-range helpers.
310 ImmBranchType BranchType()
const {
311 if (IsCondBranchImm()) {
312 return CondBranchType;
313 }
else if (IsUncondBranchImm()) {
314 return UncondBranchType;
315 }
else if (IsCompareBranch()) {
316 return CompareBranchType;
317 }
else if (IsTestBranch()) {
318 return TestBranchType;
320 return UnknownBranchType;
324 static int ImmBranchRangeBitwidth(ImmBranchType branch_type) {
325 switch (branch_type) {
326 case UncondBranchType:
327 return ImmUncondBranch_width;
329 return ImmCondBranch_width;
330 case CompareBranchType:
331 return ImmCmpBranch_width;
333 return ImmTestBranch_width;
// Maximum forward range, in bytes, reachable by the given branch type:
// half of 2^(field width + 2) since offsets are in instructions and
// signed. The expression is truncated here — the trailing term
// (presumably `- kInstrSize;`) was lost in extraction.
340 static int32_t ImmBranchRange(ImmBranchType branch_type) {
341 return (1 << (ImmBranchRangeBitwidth(branch_type) + kInstrSizeLog2)) / 2 -
// Raw signed immediate of this branch, dispatched on its family.
345 int ImmBranch()
const {
346 switch (BranchType()) {
347 case CondBranchType:
return ImmCondBranch();
348 case UncondBranchType:
return ImmUncondBranch();
349 case CompareBranchType:
return ImmCmpBranch();
350 case TestBranchType:
return ImmTestBranch();
351 default: UNREACHABLE();
// An unresolved internal reference is stored as two BRKs: the first
// holds the high 16 bits of the offset in its immediate, the next BRK
// the low 16 bits. Reassemble them here.
356 int ImmUnresolvedInternalReference()
const {
357 DCHECK(IsUnresolvedInternalReference());
360 int32_t high16 = ImmException();
361 int32_t low16 = following()->ImmException();
362 return (high16 << 16) | low16;
// Exact-opcode predicates: B (branch), BL (branch and link), and
// BLR (branch and link to register).
365 bool IsUnconditionalBranch()
const {
366 return Mask(UnconditionalBranchMask) == B;
369 bool IsBranchAndLink()
const {
return Mask(UnconditionalBranchMask) == BL; }
371 bool IsBranchAndLinkToRegister()
const {
372 return Mask(UnconditionalBranchToRegisterMask) == BLR;
375 bool IsMovz()
const {
376 return (Mask(MoveWideImmediateMask) == MOVZ_x) ||
377 (Mask(MoveWideImmediateMask) == MOVZ_w);
380 bool IsMovk()
const {
381 return (Mask(MoveWideImmediateMask) == MOVK_x) ||
382 (Mask(MoveWideImmediateMask) == MOVK_w);
// True for MOVN (move wide immediate with NOT), 64- or 32-bit variant.
385 bool IsMovn()
const {
386 return (Mask(MoveWideImmediateMask) == MOVN_x) ||
387 (Mask(MoveWideImmediateMask) == MOVN_w);
// Fragment of another predicate (signature and remaining conjuncts lost
// in extraction): checks for a 64-bit ORR shifted-register encoding —
// presumably part of a register-move (ORR Rd, xzr, Rm) test; confirm
// against the full file.
395 return (Mask(LogicalShiftedMask) == ORR_x) &&
// Compute the absolute target this PC-relative instruction refers to.
407 Instruction* ImmPCOffsetTarget();
// Range checks: whether `offset` fits the immediate field of the given
// branch type, and whether `target` is reachable from here.
409 static bool IsValidImmPCOffset(ImmBranchType branch_type, ptrdiff_t offset);
410 bool IsTargetInImmPCOffsetRange(Instruction* target);
// Patch this instruction's immediate so it refers to `target`
// (implemented in the .cc file; options control write permissions —
// confirm against AssemblerOptions).
413 void SetImmPCOffsetTarget(
const AssemblerOptions& options,
414 Instruction* target);
415 void SetUnresolvedInternalReferenceImmTarget(
const AssemblerOptions& options,
416 Instruction* target);
// Patch a load-literal to refer to `source`. The fragment below is the
// body of the literal-address accessor (its signature line was lost in
// extraction): scale the literal immediate and add it to this PC.
418 void SetImmLLiteral(Instruction* source);
421 int offset = ImmLLiteral() << kLoadLiteralScaleLog2;
422 return reinterpret_cast<uintptr_t>(
this) + offset;
// Whether InstructionAtOffset should DCHECK instruction alignment.
425 enum CheckAlignment { NO_CHECK, CHECK_ALIGNMENT };
// Instruction at a byte offset from this one (const and non-const
// overloads). By default the offset must be kInstrSize-aligned; `this`
// is an Instruction*, so pointer arithmetic is in instruction units —
// the offset parameter is in bytes only insofar as Instruction's size
// matches kInstrSize.
427 V8_INLINE
const Instruction* InstructionAtOffset(
428 int64_t offset, CheckAlignment check = CHECK_ALIGNMENT)
const {
430 DCHECK(check == NO_CHECK || IsAligned(offset, kInstrSize));
431 return this + offset;
434 V8_INLINE Instruction* InstructionAtOffset(
435 int64_t offset, CheckAlignment check = CHECK_ALIGNMENT) {
437 DCHECK(check == NO_CHECK || IsAligned(offset, kInstrSize));
438 return this + offset;
// Reinterpret an arbitrary pointer/address as an Instruction*.
441 template<
typename T> V8_INLINE
static Instruction* Cast(T src) {
442 return reinterpret_cast<Instruction*
>(src);
// Signed byte distance from this instruction to `target`.
445 V8_INLINE ptrdiff_t DistanceTo(Instruction* target) {
446 return reinterpret_cast<Address
>(target) - reinterpret_cast<Address>(
this);
// ADR reaches +/-1MB: a 21-bit signed byte offset.
450 static const int ImmPCRelRangeBitwidth = 21;
451 static bool IsValidPCRelOffset(ptrdiff_t offset) {
return is_int21(offset); }
// Private patch helpers used by SetImmPCOffsetTarget above.
452 void SetPCRelImmTarget(
const AssemblerOptions& options, Instruction* target);
453 void SetBranchImmTarget(Instruction* target);
// Fragment of the VectorFormat enum: NEON vector arrangements encoded
// so they can be ORed directly into instruction bits. The scalar
// formats additionally carry the NEONScalar bit. Most enumerators were
// lost in extraction.
458 kFormatUndefined = 0xffffffff,
460 kFormat16B = NEON_16B,
473 kFormatB = NEON_B | NEONScalar,
474 kFormatH = NEON_H | NEONScalar,
475 kFormatS = NEON_S | NEONScalar,
476 kFormatD = NEON_D | NEONScalar
// Free functions transforming and querying vector formats: halve or
// double the lane width or lane count, convert between scalar and
// vector forms, and report sizes/counts/value ranges for a format.
// All are implemented in the .cc file.
479 VectorFormat VectorFormatHalfWidth(VectorFormat vform);
480 VectorFormat VectorFormatDoubleWidth(VectorFormat vform);
481 VectorFormat VectorFormatDoubleLanes(VectorFormat vform);
482 VectorFormat VectorFormatHalfLanes(VectorFormat vform);
483 VectorFormat ScalarFormatFromLaneSize(
int lanesize);
484 VectorFormat VectorFormatHalfWidthDoubleLanes(VectorFormat vform);
485 VectorFormat VectorFormatFillQ(VectorFormat vform);
486 VectorFormat ScalarFormatFromFormat(VectorFormat vform);
487 unsigned RegisterSizeInBitsFromFormat(VectorFormat vform);
488 unsigned RegisterSizeInBytesFromFormat(VectorFormat vform);
489 int LaneSizeInBytesFromFormat(VectorFormat vform);
490 unsigned LaneSizeInBitsFromFormat(VectorFormat vform);
491 int LaneSizeInBytesLog2FromFormat(VectorFormat vform);
492 int LaneCountFromFormat(VectorFormat vform);
493 int MaxLaneCountFromFormat(VectorFormat vform);
494 bool IsVectorFormat(VectorFormat vform);
495 int64_t MaxIntFromFormat(VectorFormat vform);
496 int64_t MinIntFromFormat(VectorFormat vform);
497 uint64_t MaxUintFromFormat(VectorFormat vform);
// Method declarations (enclosing class not visible here): whether this
// location holds inline data rather than an instruction, and that data.
509 bool IsInlineData()
const;
510 uint64_t InlineData()
const;
// Pseudo-instructions understood by the ARM64 simulator. Each is a BRK/
// HLT-style exception with a distinguishing immediate.
520 const Instr kImmExceptionIsRedirectedCall = 0xca11;
524 const Instr kImmExceptionIsUnreachable = 0xdebf;
528 const Instr kImmExceptionIsPrintf = 0xdeb1;
// Layout of the printf pseudo-instruction: the argument count and the
// argument-type pattern list follow the exception at fixed offsets.
545 const unsigned kPrintfArgCountOffset = 1 * kInstrSize;
546 const unsigned kPrintfArgPatternListOffset = 2 * kInstrSize;
547 const unsigned kPrintfLength = 3 * kInstrSize;
549 const unsigned kPrintfMaxArgCount = 4;
// Argument-type codes packed kPrintfArgPatternBits at a time into the
// pattern list (enumerators lost in extraction).
553 enum PrintfArgPattern {
560 static const unsigned kPrintfArgPatternBits = 2;
// Layout of the debug pseudo-instruction: a code, parameter flags and a
// message string follow the exception at fixed offsets.
563 const Instr kImmExceptionIsDebug = 0xdeb0;
571 const unsigned kDebugCodeOffset = 1 * kInstrSize;
572 const unsigned kDebugParamsOffset = 2 * kInstrSize;
573 const unsigned kDebugMessageOffset = 3 * kInstrSize;
// Debug parameter flags: what to log, plus tracing directives stored in
// a 2-bit field masked by kDebuggerTracingDirectivesMask.
596 const unsigned kDebuggerTracingDirectivesMask = 3 << 6;
597 enum DebugParameters {
603 LOG_SYS_REGS = 1 << 4,
607 LOG_STATE = LOG_REGS | LOG_VREGS | LOG_SYS_REGS,
608 LOG_ALL = LOG_DISASM | LOG_STATE | LOG_WRITE,
611 TRACE_ENABLE = 1 << 6,
612 TRACE_DISABLE = 2 << 6,
613 TRACE_OVERRIDE = 3 << 6
// Fragments of NEONFormatMap / NEONFormatDecoder (class declarations
// not visible here): used by the disassembler to turn instruction bits
// into vector-format strings like "8b" or "4s".
// A format map names up to kNEONFormatMaxBits instruction bit positions
// and a lookup table indexed by the value those bits form.
632 static const unsigned kNEONFormatMaxBits = 6;
636 uint8_t bits[kNEONFormatMaxBits];
639 NEONFormat map[1 << kNEONFormatMaxBits];
// Whether Substitute emits the format name or a placeholder token.
644 enum SubstitutionMode { kPlaceholder, kFormat };
// Replace the format map used for substitution slot `index`.
660 void SetFormatMap(
unsigned index,
const NEONFormatMap* format);
// Substitute placeholders in `string` using placeholder mode only.
664 const char* SubstitutePlaceholders(
const char*
string);
// Substitute up to three format slots in `string`, each with its own
// mode (defaulting to emitting the decoded format).
668 const char* Substitute(
const char*
string, SubstitutionMode mode0 = kFormat,
669 SubstitutionMode mode1 = kFormat,
670 SubstitutionMode mode2 = kFormat);
// Append a format suffix to a mnemonic.
673 const char* Mnemonic(
const char* mnemonic);
675 VectorFormat GetVectorFormat(
int format_index = 0);
676 VectorFormat GetVectorFormat(
const NEONFormatMap* format_map);
// Static format-map tables (each presumably the body of a
// LogicalFormatMap()/FPFormatMap()/ScalarFormatMap()-style accessor
// whose surrounding lines were lost in extraction).
685 {NF_8B, NF_16B, NF_4H, NF_8H, NF_2S, NF_4S, NF_UNDEF, NF_2D}};
693 static const NEONFormatMap map = {{23, 22}, {NF_8H, NF_4S, NF_2D}};
703 {NF_2S, NF_4S, NF_UNDEF, NF_2D}};
712 {NF_8B, NF_16B, NF_4H, NF_8H, NF_2S, NF_4S, NF_1D, NF_2D}};
// Map keyed on the five bits {19..16, 30}, covering the by-element /
// immediate format space.
729 {19, 18, 17, 16, 30},
730 {NF_UNDEF, NF_UNDEF, NF_8B, NF_16B, NF_4H, NF_8H, NF_8B, NF_16B,
731 NF_2S, NF_4S, NF_8B, NF_16B, NF_4H, NF_8H, NF_8B, NF_16B,
732 NF_UNDEF, NF_2D, NF_8B, NF_16B, NF_4H, NF_8H, NF_8B, NF_16B,
733 NF_2S, NF_4S, NF_8B, NF_16B, NF_4H, NF_8H, NF_8B, NF_16B}};
// Scalar format maps keyed on the size bits {23, 22}.
740 static const NEONFormatMap map = {{23, 22}, {NF_B, NF_H, NF_S, NF_D}};
747 static const NEONFormatMap map = {{23, 22}, {NF_H, NF_S, NF_D}};
764 {NF_UNDEF, NF_B, NF_H, NF_B, NF_S, NF_B, NF_H, NF_B, NF_D, NF_B, NF_H,
765 NF_B, NF_S, NF_B, NF_H, NF_B}};
// Produce the substitution text for one format slot.
772 const char* GetSubstitute(
int index, SubstitutionMode mode);
// Human-readable name / placeholder token for a NEONFormat.
779 static const char* NEONFormatAsString(NEONFormat format);
782 static const char* NEONFormatAsPlaceholder(NEONFormat format);
// Gather the instruction bits named by `bits[]` into a lookup index.
786 uint8_t PickBits(
const uint8_t bits[]);
// Scratch buffers for formatted instruction text and mnemonics.
790 char form_buffer_[64];
791 char mne_buffer_[16];
797 #endif // V8_ARM64_INSTRUCTIONS_ARM64_H_