V8 API Reference, 7.2.502.16 (for Deno 0.2.4)
assembler-arm64-inl.h
// Copyright 2013 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef V8_ARM64_ASSEMBLER_ARM64_INL_H_
#define V8_ARM64_ASSEMBLER_ARM64_INL_H_

#include "src/arm64/assembler-arm64.h"
#include "src/assembler.h"
#include "src/debug/debug.h"
#include "src/objects-inl.h"
#include "src/objects/smi.h"

namespace v8 {
namespace internal {

bool CpuFeatures::SupportsOptimizer() { return true; }

bool CpuFeatures::SupportsWasmSimd128() { return true; }

void RelocInfo::apply(intptr_t delta) {
  // On arm64 only internal references and immediate branches need extra work.
  if (RelocInfo::IsInternalReference(rmode_)) {
    // Absolute code pointer inside code object moves with the code object.
    intptr_t* p = reinterpret_cast<intptr_t*>(pc_);
    *p += delta;  // Relocate entry.
  } else {
    Instruction* instr = reinterpret_cast<Instruction*>(pc_);
    if (instr->IsBranchAndLink() || instr->IsUnconditionalBranch()) {
      Address old_target =
          reinterpret_cast<Address>(instr->ImmPCOffsetTarget());
      Address new_target = old_target - delta;
      instr->SetBranchImmTarget(reinterpret_cast<Instruction*>(new_target));
    }
  }
}
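
// Editorial note (a minimal sketch, not part of the V8 source): apply() runs
// after the code has already moved by `delta`, so reading the branch at its
// new pc yields the original absolute target shifted by delta; subtracting
// delta restores it:
//
//   read_at_new_pc = new_pc + offset
//                  = (old_pc + delta) + (target - old_pc) = target + delta
//   new_target     = read_at_new_pc - delta = target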


inline bool CPURegister::IsSameSizeAndType(const CPURegister& other) const {
  return (reg_size_ == other.reg_size_) && (reg_type_ == other.reg_type_);
}


inline bool CPURegister::IsZero() const {
  DCHECK(IsValid());
  return IsRegister() && (reg_code_ == kZeroRegCode);
}


inline bool CPURegister::IsSP() const {
  DCHECK(IsValid());
  return IsRegister() && (reg_code_ == kSPRegInternalCode);
}


inline void CPURegList::Combine(const CPURegList& other) {
  DCHECK(IsValid());
  DCHECK(other.type() == type_);
  DCHECK(other.RegisterSizeInBits() == size_);
  list_ |= other.list();
}


inline void CPURegList::Remove(const CPURegList& other) {
  DCHECK(IsValid());
  if (other.type() == type_) {
    list_ &= ~other.list();
  }
}


inline void CPURegList::Combine(const CPURegister& other) {
  DCHECK(other.type() == type_);
  DCHECK(other.SizeInBits() == size_);
  Combine(other.code());
}


inline void CPURegList::Remove(const CPURegister& other1,
                               const CPURegister& other2,
                               const CPURegister& other3,
                               const CPURegister& other4) {
  if (!other1.IsNone() && (other1.type() == type_)) Remove(other1.code());
  if (!other2.IsNone() && (other2.type() == type_)) Remove(other2.code());
  if (!other3.IsNone() && (other3.type() == type_)) Remove(other3.code());
  if (!other4.IsNone() && (other4.type() == type_)) Remove(other4.code());
}


inline void CPURegList::Combine(int code) {
  DCHECK(IsValid());
  DCHECK(CPURegister::Create(code, size_, type_).IsValid());
  list_ |= (1ULL << code);
}


inline void CPURegList::Remove(int code) {
  DCHECK(IsValid());
  DCHECK(CPURegister::Create(code, size_, type_).IsValid());
  list_ &= ~(1ULL << code);
}
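
// Usage sketch (editorial addition, not part of the V8 source): a CPURegList
// is a 64-bit mask indexed by register code, so Combine/Remove are single
// bit operations. Assuming the usual arm64 register aliases:
//
//   CPURegList saved(CPURegister::kRegister, kXRegSizeInBits, 19, 28);
//   saved.Combine(lr);   // add x30 to the set
//   saved.Remove(x21);   // drop x21 from the set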


inline Register Register::XRegFromCode(unsigned code) {
  if (code == kSPRegInternalCode) {
    return sp;
  } else {
    DCHECK_LT(code, static_cast<unsigned>(kNumberOfRegisters));
    return Register::Create(code, kXRegSizeInBits);
  }
}


inline Register Register::WRegFromCode(unsigned code) {
  if (code == kSPRegInternalCode) {
    return wsp;
  } else {
    DCHECK_LT(code, static_cast<unsigned>(kNumberOfRegisters));
    return Register::Create(code, kWRegSizeInBits);
  }
}

inline VRegister VRegister::BRegFromCode(unsigned code) {
  DCHECK_LT(code, static_cast<unsigned>(kNumberOfVRegisters));
  return VRegister::Create(code, kBRegSizeInBits);
}

inline VRegister VRegister::HRegFromCode(unsigned code) {
  DCHECK_LT(code, static_cast<unsigned>(kNumberOfVRegisters));
  return VRegister::Create(code, kHRegSizeInBits);
}

inline VRegister VRegister::SRegFromCode(unsigned code) {
  DCHECK_LT(code, static_cast<unsigned>(kNumberOfVRegisters));
  return VRegister::Create(code, kSRegSizeInBits);
}

inline VRegister VRegister::DRegFromCode(unsigned code) {
  DCHECK_LT(code, static_cast<unsigned>(kNumberOfVRegisters));
  return VRegister::Create(code, kDRegSizeInBits);
}

inline VRegister VRegister::QRegFromCode(unsigned code) {
  DCHECK_LT(code, static_cast<unsigned>(kNumberOfVRegisters));
  return VRegister::Create(code, kQRegSizeInBits);
}

inline VRegister VRegister::VRegFromCode(unsigned code) {
  DCHECK_LT(code, static_cast<unsigned>(kNumberOfVRegisters));
  return VRegister::Create(code, kVRegSizeInBits);
}

inline Register CPURegister::W() const {
  DCHECK(IsRegister());
  return Register::WRegFromCode(reg_code_);
}

inline Register CPURegister::Reg() const {
  DCHECK(IsRegister());
  return Register::Create(reg_code_, reg_size_);
}

inline VRegister CPURegister::VReg() const {
  DCHECK(IsVRegister());
  return VRegister::Create(reg_code_, reg_size_);
}

inline Register CPURegister::X() const {
  DCHECK(IsRegister());
  return Register::XRegFromCode(reg_code_);
}

inline VRegister CPURegister::V() const {
  DCHECK(IsVRegister());
  return VRegister::VRegFromCode(reg_code_);
}

inline VRegister CPURegister::B() const {
  DCHECK(IsVRegister());
  return VRegister::BRegFromCode(reg_code_);
}

inline VRegister CPURegister::H() const {
  DCHECK(IsVRegister());
  return VRegister::HRegFromCode(reg_code_);
}

inline VRegister CPURegister::S() const {
  DCHECK(IsVRegister());
  return VRegister::SRegFromCode(reg_code_);
}

inline VRegister CPURegister::D() const {
  DCHECK(IsVRegister());
  return VRegister::DRegFromCode(reg_code_);
}

inline VRegister CPURegister::Q() const {
  DCHECK(IsVRegister());
  return VRegister::QRegFromCode(reg_code_);
}


// Immediate.
// Default initializer is for int types.
template<typename T>
struct ImmediateInitializer {
  static const bool kIsIntType = true;
  static inline RelocInfo::Mode rmode_for(T) { return RelocInfo::NONE; }
  static inline int64_t immediate_for(T t) {
    STATIC_ASSERT(sizeof(T) <= 8);
    return t;
  }
};

template <>
struct ImmediateInitializer<Smi> {
  static const bool kIsIntType = false;
  static inline RelocInfo::Mode rmode_for(Smi t) { return RelocInfo::NONE; }
  static inline int64_t immediate_for(Smi t) {
    return static_cast<int64_t>(t.ptr());
  }
};


template<>
struct ImmediateInitializer<ExternalReference> {
  static const bool kIsIntType = false;
  static inline RelocInfo::Mode rmode_for(ExternalReference t) {
    return RelocInfo::EXTERNAL_REFERENCE;
  }
  static inline int64_t immediate_for(ExternalReference t) {
    return static_cast<int64_t>(t.address());
  }
};


template<typename T>
Immediate::Immediate(Handle<T> value) {
  InitializeHandle(value);
}


template<typename T>
Immediate::Immediate(T t)
    : value_(ImmediateInitializer<T>::immediate_for(t)),
      rmode_(ImmediateInitializer<T>::rmode_for(t)) {}


template<typename T>
Immediate::Immediate(T t, RelocInfo::Mode rmode)
    : value_(ImmediateInitializer<T>::immediate_for(t)),
      rmode_(rmode) {
  STATIC_ASSERT(ImmediateInitializer<T>::kIsIntType);
}

// Operand.
template<typename T>
Operand::Operand(Handle<T> value) : immediate_(value), reg_(NoReg) {}


template<typename T>
Operand::Operand(T t) : immediate_(t), reg_(NoReg) {}


template<typename T>
Operand::Operand(T t, RelocInfo::Mode rmode)
    : immediate_(t, rmode),
      reg_(NoReg) {}

Operand::Operand(Register reg, Shift shift, unsigned shift_amount)
    : immediate_(0),
      reg_(reg),
      shift_(shift),
      extend_(NO_EXTEND),
      shift_amount_(shift_amount) {
  DCHECK(reg.Is64Bits() || (shift_amount < kWRegSizeInBits));
  DCHECK(reg.Is32Bits() || (shift_amount < kXRegSizeInBits));
  DCHECK_IMPLIES(reg.IsSP(), shift_amount == 0);
}


Operand::Operand(Register reg, Extend extend, unsigned shift_amount)
    : immediate_(0),
      reg_(reg),
      shift_(NO_SHIFT),
      extend_(extend),
      shift_amount_(shift_amount) {
  DCHECK(reg.IsValid());
  DCHECK_LE(shift_amount, 4);
  DCHECK(!reg.IsSP());

  // Extend modes SXTX and UXTX require a 64-bit register.
  DCHECK(reg.Is64Bits() || ((extend != SXTX) && (extend != UXTX)));
}
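
// Usage sketch (editorial addition, not part of the V8 source): the two
// register-form constructors mirror the shifted and extended operand forms
// of AArch64 data-processing instructions:
//
//   Operand(x1, LSL, 4)    // "x1, lsl #4"  (shifted register)
//   Operand(w1, UXTW, 2)   // "w1, uxtw #2" (extended register)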

bool Operand::IsHeapObjectRequest() const {
  DCHECK_IMPLIES(heap_object_request_.has_value(), reg_.Is(NoReg));
  DCHECK_IMPLIES(heap_object_request_.has_value(),
                 immediate_.rmode() == RelocInfo::EMBEDDED_OBJECT ||
                     immediate_.rmode() == RelocInfo::CODE_TARGET);
  return heap_object_request_.has_value();
}

HeapObjectRequest Operand::heap_object_request() const {
  DCHECK(IsHeapObjectRequest());
  return *heap_object_request_;
}

bool Operand::IsImmediate() const {
  return reg_.Is(NoReg) && !IsHeapObjectRequest();
}


bool Operand::IsShiftedRegister() const {
  return reg_.IsValid() && (shift_ != NO_SHIFT);
}


bool Operand::IsExtendedRegister() const {
  return reg_.IsValid() && (extend_ != NO_EXTEND);
}


bool Operand::IsZero() const {
  if (IsImmediate()) {
    return ImmediateValue() == 0;
  } else {
    return reg().IsZero();
  }
}


Operand Operand::ToExtendedRegister() const {
  DCHECK(IsShiftedRegister());
  DCHECK((shift_ == LSL) && (shift_amount_ <= 4));
  return Operand(reg_, reg_.Is64Bits() ? UXTX : UXTW, shift_amount_);
}

Immediate Operand::immediate_for_heap_object_request() const {
  DCHECK((heap_object_request().kind() == HeapObjectRequest::kHeapNumber &&
          immediate_.rmode() == RelocInfo::EMBEDDED_OBJECT) ||
         (heap_object_request().kind() == HeapObjectRequest::kCodeStub &&
          immediate_.rmode() == RelocInfo::CODE_TARGET) ||
         (heap_object_request().kind() == HeapObjectRequest::kStringConstant &&
          immediate_.rmode() == RelocInfo::EMBEDDED_OBJECT));
  return immediate_;
}

Immediate Operand::immediate() const {
  DCHECK(IsImmediate());
  return immediate_;
}


int64_t Operand::ImmediateValue() const {
  DCHECK(IsImmediate());
  return immediate_.value();
}

RelocInfo::Mode Operand::ImmediateRMode() const {
  DCHECK(IsImmediate() || IsHeapObjectRequest());
  return immediate_.rmode();
}

Register Operand::reg() const {
  DCHECK(IsShiftedRegister() || IsExtendedRegister());
  return reg_;
}


Shift Operand::shift() const {
  DCHECK(IsShiftedRegister());
  return shift_;
}


Extend Operand::extend() const {
  DCHECK(IsExtendedRegister());
  return extend_;
}


unsigned Operand::shift_amount() const {
  DCHECK(IsShiftedRegister() || IsExtendedRegister());
  return shift_amount_;
}


Operand Operand::UntagSmi(Register smi) {
  DCHECK(smi.Is64Bits());
  DCHECK(SmiValuesAre32Bits() || SmiValuesAre31Bits());
  return Operand(smi, ASR, kSmiShift);
}


Operand Operand::UntagSmiAndScale(Register smi, int scale) {
  DCHECK(smi.Is64Bits());
  DCHECK((scale >= 0) && (scale <= (64 - kSmiValueSize)));
  DCHECK(SmiValuesAre32Bits() || SmiValuesAre31Bits());
  if (scale > kSmiShift) {
    return Operand(smi, LSL, scale - kSmiShift);
  } else if (scale < kSmiShift) {
    return Operand(smi, ASR, kSmiShift - scale);
  }
  return Operand(smi);
}
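
// Worked example (editorial addition, not part of the V8 source): assuming
// 32-bit Smis (kSmiShift == 32), indexing an array of 8-byte elements by a
// tagged Smi needs (smi >> 32) << 3. UntagSmiAndScale(smi, 3) folds both
// shifts into one: scale (3) < kSmiShift (32), so it returns
// Operand(smi, ASR, 29).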


MemOperand::MemOperand()
    : base_(NoReg), regoffset_(NoReg), offset_(0), addrmode_(Offset),
      shift_(NO_SHIFT), extend_(NO_EXTEND), shift_amount_(0) {
}


MemOperand::MemOperand(Register base, int64_t offset, AddrMode addrmode)
    : base_(base), regoffset_(NoReg), offset_(offset), addrmode_(addrmode),
      shift_(NO_SHIFT), extend_(NO_EXTEND), shift_amount_(0) {
  DCHECK(base.Is64Bits() && !base.IsZero());
}


MemOperand::MemOperand(Register base,
                       Register regoffset,
                       Extend extend,
                       unsigned shift_amount)
    : base_(base), regoffset_(regoffset), offset_(0), addrmode_(Offset),
      shift_(NO_SHIFT), extend_(extend), shift_amount_(shift_amount) {
  DCHECK(base.Is64Bits() && !base.IsZero());
  DCHECK(!regoffset.IsSP());
  DCHECK((extend == UXTW) || (extend == SXTW) || (extend == SXTX));

  // SXTX extend mode requires a 64-bit offset register.
  DCHECK(regoffset.Is64Bits() || (extend != SXTX));
}


MemOperand::MemOperand(Register base,
                       Register regoffset,
                       Shift shift,
                       unsigned shift_amount)
    : base_(base), regoffset_(regoffset), offset_(0), addrmode_(Offset),
      shift_(shift), extend_(NO_EXTEND), shift_amount_(shift_amount) {
  DCHECK(base.Is64Bits() && !base.IsZero());
  DCHECK(regoffset.Is64Bits() && !regoffset.IsSP());
  DCHECK(shift == LSL);
}

MemOperand::MemOperand(Register base, const Operand& offset, AddrMode addrmode)
    : base_(base), regoffset_(NoReg), addrmode_(addrmode) {
  DCHECK(base.Is64Bits() && !base.IsZero());

  if (offset.IsImmediate()) {
    offset_ = offset.ImmediateValue();
  } else if (offset.IsShiftedRegister()) {
    DCHECK((addrmode == Offset) || (addrmode == PostIndex));

    regoffset_ = offset.reg();
    shift_ = offset.shift();
    shift_amount_ = offset.shift_amount();

    extend_ = NO_EXTEND;
    offset_ = 0;

    // These assertions match those in the shifted-register constructor.
    DCHECK(regoffset_.Is64Bits() && !regoffset_.IsSP());
    DCHECK(shift_ == LSL);
  } else {
    DCHECK(offset.IsExtendedRegister());
    DCHECK(addrmode == Offset);

    regoffset_ = offset.reg();
    extend_ = offset.extend();
    shift_amount_ = offset.shift_amount();

    shift_ = NO_SHIFT;
    offset_ = 0;

    // These assertions match those in the extended-register constructor.
    DCHECK(!regoffset_.IsSP());
    DCHECK((extend_ == UXTW) || (extend_ == SXTW) || (extend_ == SXTX));
    DCHECK((regoffset_.Is64Bits() || (extend_ != SXTX)));
  }
}
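
// Usage sketch (editorial addition, not part of the V8 source): the
// constructors above cover the AArch64 addressing modes:
//
//   MemOperand(x0, 16)              // [x0, #16]         immediate offset
//   MemOperand(x0, 16, PreIndex)    // [x0, #16]!        pre-index
//   MemOperand(x0, 16, PostIndex)   // [x0], #16         post-index
//   MemOperand(x0, x1, LSL, 3)      // [x0, x1, lsl #3]  register offset
//   MemOperand(x0, w1, UXTW, 2)     // [x0, w1, uxtw #2] extended register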

bool MemOperand::IsImmediateOffset() const {
  return (addrmode_ == Offset) && regoffset_.Is(NoReg);
}


bool MemOperand::IsRegisterOffset() const {
  return (addrmode_ == Offset) && !regoffset_.Is(NoReg);
}


bool MemOperand::IsPreIndex() const {
  return addrmode_ == PreIndex;
}


bool MemOperand::IsPostIndex() const {
  return addrmode_ == PostIndex;
}

Operand MemOperand::OffsetAsOperand() const {
  if (IsImmediateOffset()) {
    return offset();
  } else {
    DCHECK(IsRegisterOffset());
    if (extend() == NO_EXTEND) {
      return Operand(regoffset(), shift(), shift_amount());
    } else {
      return Operand(regoffset(), extend(), shift_amount());
    }
  }
}


void Assembler::Unreachable() {
#ifdef USE_SIMULATOR
  debug("UNREACHABLE", __LINE__, BREAK);
#else
  // Crash by branching to 0. lr now points near the fault.
  Emit(BLR | Rn(xzr));
#endif
}


Address Assembler::target_pointer_address_at(Address pc) {
  Instruction* instr = reinterpret_cast<Instruction*>(pc);
  DCHECK(instr->IsLdrLiteralX());
  return reinterpret_cast<Address>(instr->ImmPCOffsetTarget());
}


// Read/Modify the code target address in the branch/call instruction at pc.
Address Assembler::target_address_at(Address pc, Address constant_pool) {
  Instruction* instr = reinterpret_cast<Instruction*>(pc);
  if (instr->IsLdrLiteralX()) {
    return Memory<Address>(target_pointer_address_at(pc));
  } else {
    DCHECK(instr->IsBranchAndLink() || instr->IsUnconditionalBranch());
    return reinterpret_cast<Address>(instr->ImmPCOffsetTarget());
  }
}

Handle<Code> Assembler::code_target_object_handle_at(Address pc) {
  Instruction* instr = reinterpret_cast<Instruction*>(pc);
  if (instr->IsLdrLiteralX()) {
    return Handle<Code>(reinterpret_cast<Address*>(
        Assembler::target_address_at(pc, 0 /* unused */)));
  } else {
    DCHECK(instr->IsBranchAndLink() || instr->IsUnconditionalBranch());
    DCHECK_EQ(instr->ImmPCOffset() % kInstrSize, 0);
    return GetCodeTarget(instr->ImmPCOffset() >> kInstrSizeLog2);
  }
}

Address Assembler::runtime_entry_at(Address pc) {
  Instruction* instr = reinterpret_cast<Instruction*>(pc);
  if (instr->IsLdrLiteralX()) {
    return Assembler::target_address_at(pc, 0 /* unused */);
  } else {
    DCHECK(instr->IsBranchAndLink() || instr->IsUnconditionalBranch());
    return instr->ImmPCOffset() + options().code_range_start;
  }
}

Address Assembler::target_address_from_return_address(Address pc) {
  // Returns the address of the call target from the return address that will
  // be returned to after a call.
  // Call sequence on ARM64 is:
  //  ldr ip0, #... @ load from literal pool
  //  blr ip0
  Address candidate = pc - 2 * kInstrSize;
  Instruction* instr = reinterpret_cast<Instruction*>(candidate);
  USE(instr);
  DCHECK(instr->IsLdrLiteralX());
  return candidate;
}
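
// Worked example (editorial addition, not part of the V8 source): the return
// address points just past the blr, so the literal load sits two
// instructions (2 * kInstrSize = 8 bytes) before it:
//
//   pc - 8:  ldr ip0, <literal>   // candidate returned by this function
//   pc - 4:  blr ip0
//   pc:      <return address>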

int Assembler::deserialization_special_target_size(Address location) {
  Instruction* instr = reinterpret_cast<Instruction*>(location);
  if (instr->IsBranchAndLink() || instr->IsUnconditionalBranch()) {
    return kSpecialTargetSize;
  } else {
    DCHECK_EQ(instr->InstructionBits(), 0);
    return kPointerSize;
  }
}

void Assembler::deserialization_set_special_target_at(Address location,
                                                      Code code,
                                                      Address target) {
  Instruction* instr = reinterpret_cast<Instruction*>(location);
  if (instr->IsBranchAndLink() || instr->IsUnconditionalBranch()) {
    if (target == 0) {
      // We are simply wiping the target out for serialization. Set the offset
      // to zero instead.
      target = location;
    }
    instr->SetBranchImmTarget(reinterpret_cast<Instruction*>(target));
    Assembler::FlushICache(location, kInstrSize);
  } else {
    DCHECK_EQ(instr->InstructionBits(), 0);
    Memory<Address>(location) = target;
    // Intuitively, we would think it is necessary to always flush the
    // instruction cache after patching a target address in the code. However,
    // in this case, only the constant pool contents change. The instruction
    // accessing the constant pool remains unchanged, so a flush is not
    // required.
  }
}

void Assembler::deserialization_set_target_internal_reference_at(
    Address pc, Address target, RelocInfo::Mode mode) {
  Memory<Address>(pc) = target;
}

void Assembler::set_target_address_at(Address pc, Address constant_pool,
                                      Address target,
                                      ICacheFlushMode icache_flush_mode) {
  Instruction* instr = reinterpret_cast<Instruction*>(pc);
  if (instr->IsLdrLiteralX()) {
    Memory<Address>(target_pointer_address_at(pc)) = target;
    // Intuitively, we would think it is necessary to always flush the
    // instruction cache after patching a target address in the code. However,
    // in this case, only the constant pool contents change. The instruction
    // accessing the constant pool remains unchanged, so a flush is not
    // required.
  } else {
    DCHECK(instr->IsBranchAndLink() || instr->IsUnconditionalBranch());
    if (target == 0) {
      // We are simply wiping the target out for serialization. Set the offset
      // to zero instead.
      target = pc;
    }
    instr->SetBranchImmTarget(reinterpret_cast<Instruction*>(target));
    if (icache_flush_mode != SKIP_ICACHE_FLUSH) {
      Assembler::FlushICache(pc, kInstrSize);
    }
  }
}

int RelocInfo::target_address_size() {
  if (IsCodedSpecially()) {
    return Assembler::kSpecialTargetSize;
  } else {
    DCHECK(reinterpret_cast<Instruction*>(pc_)->IsLdrLiteralX());
    return kPointerSize;
  }
}


Address RelocInfo::target_address() {
  DCHECK(IsCodeTarget(rmode_) || IsRuntimeEntry(rmode_) || IsWasmCall(rmode_));
  return Assembler::target_address_at(pc_, constant_pool_);
}

Address RelocInfo::target_address_address() {
  DCHECK(IsCodeTarget(rmode_) || IsRuntimeEntry(rmode_) || IsWasmCall(rmode_) ||
         IsEmbeddedObject(rmode_) || IsExternalReference(rmode_) ||
         IsOffHeapTarget(rmode_));
  Instruction* instr = reinterpret_cast<Instruction*>(pc_);
  // Read the address of the word containing the target_address in an
  // instruction stream.
  // The only architecture-independent user of this function is the serializer.
  // The serializer uses it to find out how many raw bytes of instruction to
  // output before the next target.
  // For an instruction like B/BL, where the target bits are mixed into the
  // instruction bits, the size of the target will be zero, indicating that the
  // serializer should not step forward in memory after a target is resolved
  // and written.
  // For LDR literal instructions, we can skip up to the constant pool entry
  // address. We make sure that RelocInfo is ordered by the
  // target_address_address so that we do not skip over any relocatable
  // instruction sequences.
  if (instr->IsLdrLiteralX()) {
    return constant_pool_entry_address();
  } else {
    DCHECK(instr->IsBranchAndLink() || instr->IsUnconditionalBranch());
    return pc_;
  }
}


Address RelocInfo::constant_pool_entry_address() {
  DCHECK(IsInConstantPool());
  return Assembler::target_pointer_address_at(pc_);
}

HeapObject* RelocInfo::target_object() {
  DCHECK(IsCodeTarget(rmode_) || rmode_ == EMBEDDED_OBJECT);
  return HeapObject::cast(reinterpret_cast<Object*>(
      Assembler::target_address_at(pc_, constant_pool_)));
}

Handle<HeapObject> RelocInfo::target_object_handle(Assembler* origin) {
  if (rmode_ == EMBEDDED_OBJECT) {
    return Handle<HeapObject>(reinterpret_cast<Address*>(
        Assembler::target_address_at(pc_, constant_pool_)));
  } else {
    DCHECK(IsCodeTarget(rmode_));
    return origin->code_target_object_handle_at(pc_);
  }
}

void RelocInfo::set_target_object(Heap* heap, HeapObject* target,
                                  WriteBarrierMode write_barrier_mode,
                                  ICacheFlushMode icache_flush_mode) {
  DCHECK(IsCodeTarget(rmode_) || rmode_ == EMBEDDED_OBJECT);
  Assembler::set_target_address_at(pc_, constant_pool_,
                                   reinterpret_cast<Address>(target),
                                   icache_flush_mode);
  if (write_barrier_mode == UPDATE_WRITE_BARRIER && host() != nullptr) {
    WriteBarrierForCode(host(), this, target);
  }
}


Address RelocInfo::target_external_reference() {
  DCHECK(rmode_ == EXTERNAL_REFERENCE);
  return Assembler::target_address_at(pc_, constant_pool_);
}

void RelocInfo::set_target_external_reference(
    Address target, ICacheFlushMode icache_flush_mode) {
  DCHECK(rmode_ == RelocInfo::EXTERNAL_REFERENCE);
  Assembler::set_target_address_at(pc_, constant_pool_, target,
                                   icache_flush_mode);
}

Address RelocInfo::target_internal_reference() {
  DCHECK(rmode_ == INTERNAL_REFERENCE);
  return Memory<Address>(pc_);
}


Address RelocInfo::target_internal_reference_address() {
  DCHECK(rmode_ == INTERNAL_REFERENCE);
  return pc_;
}

Address RelocInfo::target_runtime_entry(Assembler* origin) {
  DCHECK(IsRuntimeEntry(rmode_));
  return origin->runtime_entry_at(pc_);
}

void RelocInfo::set_target_runtime_entry(Address target,
                                         WriteBarrierMode write_barrier_mode,
                                         ICacheFlushMode icache_flush_mode) {
  DCHECK(IsRuntimeEntry(rmode_));
  if (target_address() != target) {
    set_target_address(target, write_barrier_mode, icache_flush_mode);
  }
}

Address RelocInfo::target_off_heap_target() {
  DCHECK(IsOffHeapTarget(rmode_));
  return Assembler::target_address_at(pc_, constant_pool_);
}

void RelocInfo::WipeOut() {
  DCHECK(IsEmbeddedObject(rmode_) || IsCodeTarget(rmode_) ||
         IsRuntimeEntry(rmode_) || IsExternalReference(rmode_) ||
         IsInternalReference(rmode_) || IsOffHeapTarget(rmode_));
  if (IsInternalReference(rmode_)) {
    Memory<Address>(pc_) = kNullAddress;
  } else {
    Assembler::set_target_address_at(pc_, constant_pool_, kNullAddress);
  }
}

template <typename ObjectVisitor>
void RelocInfo::Visit(ObjectVisitor* visitor) {
  RelocInfo::Mode mode = rmode();
  if (mode == RelocInfo::EMBEDDED_OBJECT) {
    visitor->VisitEmbeddedPointer(host(), this);
  } else if (RelocInfo::IsCodeTargetMode(mode)) {
    visitor->VisitCodeTarget(host(), this);
  } else if (mode == RelocInfo::EXTERNAL_REFERENCE) {
    visitor->VisitExternalReference(host(), this);
  } else if (mode == RelocInfo::INTERNAL_REFERENCE) {
    visitor->VisitInternalReference(host(), this);
  } else if (RelocInfo::IsRuntimeEntry(mode)) {
    visitor->VisitRuntimeEntry(host(), this);
  } else if (RelocInfo::IsOffHeapTarget(mode)) {
    visitor->VisitOffHeapTarget(host(), this);
  }
}

LoadStoreOp Assembler::LoadOpFor(const CPURegister& rt) {
  DCHECK(rt.IsValid());
  if (rt.IsRegister()) {
    return rt.Is64Bits() ? LDR_x : LDR_w;
  } else {
    DCHECK(rt.IsVRegister());
    switch (rt.SizeInBits()) {
      case kBRegSizeInBits:
        return LDR_b;
      case kHRegSizeInBits:
        return LDR_h;
      case kSRegSizeInBits:
        return LDR_s;
      case kDRegSizeInBits:
        return LDR_d;
      default:
        DCHECK(rt.IsQ());
        return LDR_q;
    }
  }
}


LoadStoreOp Assembler::StoreOpFor(const CPURegister& rt) {
  DCHECK(rt.IsValid());
  if (rt.IsRegister()) {
    return rt.Is64Bits() ? STR_x : STR_w;
  } else {
    DCHECK(rt.IsVRegister());
    switch (rt.SizeInBits()) {
      case kBRegSizeInBits:
        return STR_b;
      case kHRegSizeInBits:
        return STR_h;
      case kSRegSizeInBits:
        return STR_s;
      case kDRegSizeInBits:
        return STR_d;
      default:
        DCHECK(rt.IsQ());
        return STR_q;
    }
  }
}

LoadStorePairOp Assembler::LoadPairOpFor(const CPURegister& rt,
                                         const CPURegister& rt2) {
  DCHECK_EQ(STP_w | LoadStorePairLBit, LDP_w);
  return static_cast<LoadStorePairOp>(StorePairOpFor(rt, rt2) |
                                      LoadStorePairLBit);
}

LoadStorePairOp Assembler::StorePairOpFor(const CPURegister& rt,
                                          const CPURegister& rt2) {
  DCHECK(AreSameSizeAndType(rt, rt2));
  USE(rt2);
  if (rt.IsRegister()) {
    return rt.Is64Bits() ? STP_x : STP_w;
  } else {
    DCHECK(rt.IsVRegister());
    switch (rt.SizeInBits()) {
      case kSRegSizeInBits:
        return STP_s;
      case kDRegSizeInBits:
        return STP_d;
      default:
        DCHECK(rt.IsQ());
        return STP_q;
    }
  }
}


LoadLiteralOp Assembler::LoadLiteralOpFor(const CPURegister& rt) {
  if (rt.IsRegister()) {
    return rt.Is64Bits() ? LDR_x_lit : LDR_w_lit;
  } else {
    DCHECK(rt.IsVRegister());
    return rt.Is64Bits() ? LDR_d_lit : LDR_s_lit;
  }
}


int Assembler::LinkAndGetInstructionOffsetTo(Label* label) {
  DCHECK_EQ(kStartOfLabelLinkChain, 0);
  int offset = LinkAndGetByteOffsetTo(label);
  DCHECK(IsAligned(offset, kInstrSize));
  return offset >> kInstrSizeLog2;
}


Instr Assembler::Flags(FlagsUpdate S) {
  if (S == SetFlags) {
    return 1 << FlagsUpdate_offset;
  } else if (S == LeaveFlags) {
    return 0 << FlagsUpdate_offset;
  }
  UNREACHABLE();
}


Instr Assembler::Cond(Condition cond) {
  return cond << Condition_offset;
}


Instr Assembler::ImmPCRelAddress(int imm21) {
  CHECK(is_int21(imm21));
  Instr imm = static_cast<Instr>(truncate_to_int21(imm21));
  Instr immhi = (imm >> ImmPCRelLo_width) << ImmPCRelHi_offset;
  Instr immlo = imm << ImmPCRelLo_offset;
  return (immhi & ImmPCRelHi_mask) | (immlo & ImmPCRelLo_mask);
}
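
// Worked example (editorial addition, not part of the V8 source): adr splits
// its 21-bit immediate into immlo (imm21 bits 1:0, instruction bits 30:29)
// and immhi (imm21 bits 20:2, instruction bits 23:5); the shifts and masks
// above place each part in its field.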


Instr Assembler::ImmUncondBranch(int imm26) {
  CHECK(is_int26(imm26));
  return truncate_to_int26(imm26) << ImmUncondBranch_offset;
}


Instr Assembler::ImmCondBranch(int imm19) {
  CHECK(is_int19(imm19));
  return truncate_to_int19(imm19) << ImmCondBranch_offset;
}


Instr Assembler::ImmCmpBranch(int imm19) {
  CHECK(is_int19(imm19));
  return truncate_to_int19(imm19) << ImmCmpBranch_offset;
}


Instr Assembler::ImmTestBranch(int imm14) {
  CHECK(is_int14(imm14));
  return truncate_to_int14(imm14) << ImmTestBranch_offset;
}


Instr Assembler::ImmTestBranchBit(unsigned bit_pos) {
  DCHECK(is_uint6(bit_pos));
  // Subtract five from the shift offset, as we need bit 5 from bit_pos.
  unsigned b5 = bit_pos << (ImmTestBranchBit5_offset - 5);
  unsigned b40 = bit_pos << ImmTestBranchBit40_offset;
  b5 &= ImmTestBranchBit5_mask;
  b40 &= ImmTestBranchBit40_mask;
  return b5 | b40;
}
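
// Worked example (editorial addition, not part of the V8 source): tbz/tbnz
// encode a 6-bit bit position as b5 (bit 5 of bit_pos) and b40 (bits 4:0).
// For bit_pos = 37 (0b100101), b5 = 1 and b40 = 0b00101 after the shifts
// and masks above.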


Instr Assembler::SF(Register rd) {
  return rd.Is64Bits() ? SixtyFourBits : ThirtyTwoBits;
}


Instr Assembler::ImmAddSub(int imm) {
  DCHECK(IsImmAddSub(imm));
  if (is_uint12(imm)) {  // No shift required.
    imm <<= ImmAddSub_offset;
  } else {
    imm = ((imm >> 12) << ImmAddSub_offset) | (1 << ShiftAddSub_offset);
  }
  return imm;
}
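
// Worked example (editorial addition, not part of the V8 source): add/sub
// immediates are 12 bits with an optional "lsl #12". ImmAddSub(0x123)
// encodes 0x123 unshifted; ImmAddSub(0x123000) encodes 0x123 with the shift
// bit set. Anything else fails the IsImmAddSub() check.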


Instr Assembler::ImmS(unsigned imms, unsigned reg_size) {
  DCHECK(((reg_size == kXRegSizeInBits) && is_uint6(imms)) ||
         ((reg_size == kWRegSizeInBits) && is_uint5(imms)));
  USE(reg_size);
  return imms << ImmS_offset;
}


Instr Assembler::ImmR(unsigned immr, unsigned reg_size) {
  DCHECK(((reg_size == kXRegSizeInBits) && is_uint6(immr)) ||
         ((reg_size == kWRegSizeInBits) && is_uint5(immr)));
  USE(reg_size);
  DCHECK(is_uint6(immr));
  return immr << ImmR_offset;
}


Instr Assembler::ImmSetBits(unsigned imms, unsigned reg_size) {
  DCHECK((reg_size == kWRegSizeInBits) || (reg_size == kXRegSizeInBits));
  DCHECK(is_uint6(imms));
  DCHECK((reg_size == kXRegSizeInBits) || is_uint6(imms + 3));
  USE(reg_size);
  return imms << ImmSetBits_offset;
}


Instr Assembler::ImmRotate(unsigned immr, unsigned reg_size) {
  DCHECK((reg_size == kWRegSizeInBits) || (reg_size == kXRegSizeInBits));
  DCHECK(((reg_size == kXRegSizeInBits) && is_uint6(immr)) ||
         ((reg_size == kWRegSizeInBits) && is_uint5(immr)));
  USE(reg_size);
  return immr << ImmRotate_offset;
}


Instr Assembler::ImmLLiteral(int imm19) {
  CHECK(is_int19(imm19));
  return truncate_to_int19(imm19) << ImmLLiteral_offset;
}


Instr Assembler::BitN(unsigned bitn, unsigned reg_size) {
  DCHECK((reg_size == kWRegSizeInBits) || (reg_size == kXRegSizeInBits));
  DCHECK((reg_size == kXRegSizeInBits) || (bitn == 0));
  USE(reg_size);
  return bitn << BitN_offset;
}


Instr Assembler::ShiftDP(Shift shift) {
  DCHECK(shift == LSL || shift == LSR || shift == ASR || shift == ROR);
  return shift << ShiftDP_offset;
}


Instr Assembler::ImmDPShift(unsigned amount) {
  DCHECK(is_uint6(amount));
  return amount << ImmDPShift_offset;
}


Instr Assembler::ExtendMode(Extend extend) {
  return extend << ExtendMode_offset;
}


Instr Assembler::ImmExtendShift(unsigned left_shift) {
  DCHECK_LE(left_shift, 4);
  return left_shift << ImmExtendShift_offset;
}


Instr Assembler::ImmCondCmp(unsigned imm) {
  DCHECK(is_uint5(imm));
  return imm << ImmCondCmp_offset;
}


Instr Assembler::Nzcv(StatusFlags nzcv) {
  return ((nzcv >> Flags_offset) & 0xf) << Nzcv_offset;
}


Instr Assembler::ImmLSUnsigned(int imm12) {
  DCHECK(is_uint12(imm12));
  return imm12 << ImmLSUnsigned_offset;
}


Instr Assembler::ImmLS(int imm9) {
  DCHECK(is_int9(imm9));
  return truncate_to_int9(imm9) << ImmLS_offset;
}

Instr Assembler::ImmLSPair(int imm7, unsigned size) {
  DCHECK_EQ((imm7 >> size) << size, imm7);
  int scaled_imm7 = imm7 >> size;
  DCHECK(is_int7(scaled_imm7));
  return truncate_to_int7(scaled_imm7) << ImmLSPair_offset;
}
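
// Worked example (editorial addition, not part of the V8 source): ldp/stp
// immediates are scaled by the access size, so for an X-register pair
// (size == 3) ImmLSPair(-16, 3) encodes the 7-bit signed value -16 >> 3 = -2.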


Instr Assembler::ImmShiftLS(unsigned shift_amount) {
  DCHECK(is_uint1(shift_amount));
  return shift_amount << ImmShiftLS_offset;
}


Instr Assembler::ImmException(int imm16) {
  DCHECK(is_uint16(imm16));
  return imm16 << ImmException_offset;
}


Instr Assembler::ImmSystemRegister(int imm15) {
  DCHECK(is_uint15(imm15));
  return imm15 << ImmSystemRegister_offset;
}


Instr Assembler::ImmHint(int imm7) {
  DCHECK(is_uint7(imm7));
  return imm7 << ImmHint_offset;
}


Instr Assembler::ImmBarrierDomain(int imm2) {
  DCHECK(is_uint2(imm2));
  return imm2 << ImmBarrierDomain_offset;
}


Instr Assembler::ImmBarrierType(int imm2) {
  DCHECK(is_uint2(imm2));
  return imm2 << ImmBarrierType_offset;
}

unsigned Assembler::CalcLSDataSize(LoadStoreOp op) {
  DCHECK((LSSize_offset + LSSize_width) == (kInstrSize * 8));
  unsigned size = static_cast<Instr>(op >> LSSize_offset);
  if ((op & LSVector_mask) != 0) {
    // Vector register memory operations encode the access size in the "size"
    // and "opc" fields.
    if ((size == 0) && ((op & LSOpc_mask) >> LSOpc_offset) >= 2) {
      size = kQRegSizeLog2;
    }
  }
  return size;
}


Instr Assembler::ImmMoveWide(int imm) {
  DCHECK(is_uint16(imm));
  return imm << ImmMoveWide_offset;
}


Instr Assembler::ShiftMoveWide(int shift) {
  DCHECK(is_uint2(shift));
  return shift << ShiftMoveWide_offset;
}

Instr Assembler::FPType(VRegister fd) { return fd.Is64Bits() ? FP64 : FP32; }

Instr Assembler::FPScale(unsigned scale) {
  DCHECK(is_uint6(scale));
  return scale << FPScale_offset;
}


const Register& Assembler::AppropriateZeroRegFor(const CPURegister& reg) const {
  return reg.Is64Bits() ? xzr : wzr;
}


inline void Assembler::CheckBufferSpace() {
  DCHECK(pc_ < (buffer_ + buffer_size_));
  if (buffer_space() < kGap) {
    GrowBuffer();
  }
}


inline void Assembler::CheckBuffer() {
  CheckBufferSpace();
  if (pc_offset() >= next_veneer_pool_check_) {
    CheckVeneerPool(false, true);
  }
  if (pc_offset() >= next_constant_pool_check_) {
    CheckConstPool(false, true);
  }
}

}  // namespace internal
}  // namespace v8

#endif  // V8_ARM64_ASSEMBLER_ARM64_INL_H_