#ifndef V8_ARM_ASSEMBLER_ARM_INL_H_
#define V8_ARM_ASSEMBLER_ARM_INL_H_

#include "src/arm/assembler-arm.h"

#include "src/assembler.h"
#include "src/debug/debug.h"
#include "src/objects-inl.h"
#include "src/objects/smi.h"

namespace v8 {
namespace internal {

bool CpuFeatures::SupportsOptimizer() { return true; }

bool CpuFeatures::SupportsWasmSimd128() { return IsSupported(NEON); }

int DoubleRegister::NumRegisters() {
  return CpuFeatures::IsSupported(VFP32DREGS) ? 32 : 16;
}

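// RelocInfo::apply is called when a piece of code moves in memory by `delta`
// bytes: absolute internal references are shifted by that amount, while
// relative code targets get their branch offset rewritten instead.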
void RelocInfo::apply(intptr_t delta) {
  if (RelocInfo::IsInternalReference(rmode_)) {
    // Absolute internal references move with the code object.
    int32_t* p = reinterpret_cast<int32_t*>(pc_);
    *p += delta;  // Relocate the entry.
  } else if (RelocInfo::IsRelativeCodeTarget(rmode_)) {
    Instruction* branch = Instruction::At(pc_);
    int32_t branch_offset = branch->GetBranchOffset() + delta;
    branch->SetBranchOffset(branch_offset);
  }
}

Address RelocInfo::target_address() {
  DCHECK(IsCodeTargetMode(rmode_) || IsRuntimeEntry(rmode_) ||
         IsWasmCall(rmode_));
  return Assembler::target_address_at(pc_, constant_pool_);
}

Address RelocInfo::target_address_address() {
  DCHECK(IsCodeTarget(rmode_) || IsRuntimeEntry(rmode_) || IsWasmCall(rmode_) ||
         IsEmbeddedObject(rmode_) || IsExternalReference(rmode_) ||
         IsOffHeapTarget(rmode_));
  if (Assembler::IsMovW(Memory<int32_t>(pc_))) {
    // The target is encoded directly in a movw/movt pair, so the relevant
    // address is the instruction itself.
    return pc_;
  } else {
    DCHECK(Assembler::IsLdrPcImmediateOffset(Memory<int32_t>(pc_)));
    return constant_pool_entry_address();
  }
}

Address RelocInfo::constant_pool_entry_address() {
  DCHECK(IsInConstantPool());
  return Assembler::constant_pool_entry_address(pc_, constant_pool_);
}

int RelocInfo::target_address_size() { return kPointerSize; }

HeapObject* RelocInfo::target_object() {
  DCHECK(IsCodeTarget(rmode_) || rmode_ == EMBEDDED_OBJECT);
  return HeapObject::cast(reinterpret_cast<Object*>(
      Assembler::target_address_at(pc_, constant_pool_)));
}

Handle<HeapObject> RelocInfo::target_object_handle(Assembler* origin) {
  if (IsCodeTarget(rmode_) || rmode_ == EMBEDDED_OBJECT) {
    return Handle<HeapObject>(reinterpret_cast<Address*>(
        Assembler::target_address_at(pc_, constant_pool_)));
  }
  DCHECK(IsRelativeCodeTarget(rmode_));
  return origin->relative_code_target_object_handle_at(pc_);
}

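// Patching an embedded object creates a new Code -> HeapObject edge, so the
// setter below both rewrites the target address and (unless the caller opts
// out) records a write barrier for the host code object.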
void RelocInfo::set_target_object(Heap* heap, HeapObject* target,
                                  WriteBarrierMode write_barrier_mode,
                                  ICacheFlushMode icache_flush_mode) {
  DCHECK(IsCodeTarget(rmode_) || rmode_ == EMBEDDED_OBJECT);
  Assembler::set_target_address_at(pc_, constant_pool_,
                                   reinterpret_cast<Address>(target),
                                   icache_flush_mode);
  if (write_barrier_mode == UPDATE_WRITE_BARRIER && host() != nullptr) {
    WriteBarrierForCode(host(), this, target);
  }
}

Address RelocInfo::target_external_reference() {
  DCHECK(rmode_ == EXTERNAL_REFERENCE);
  return Assembler::target_address_at(pc_, constant_pool_);
}

void RelocInfo::set_target_external_reference(
    Address target, ICacheFlushMode icache_flush_mode) {
  DCHECK(rmode_ == RelocInfo::EXTERNAL_REFERENCE);
  Assembler::set_target_address_at(pc_, constant_pool_, target,
                                   icache_flush_mode);
}

Address RelocInfo::target_internal_reference() {
  DCHECK(rmode_ == INTERNAL_REFERENCE);
  return Memory<Address>(pc_);
}

Address RelocInfo::target_internal_reference_address() {
  DCHECK(rmode_ == INTERNAL_REFERENCE);
  return pc_;
}

Address RelocInfo::target_runtime_entry(Assembler* origin) {
  DCHECK(IsRuntimeEntry(rmode_));
  return target_address();
}

void RelocInfo::set_target_runtime_entry(Address target,
                                         WriteBarrierMode write_barrier_mode,
                                         ICacheFlushMode icache_flush_mode) {
  DCHECK(IsRuntimeEntry(rmode_));
  if (target_address() != target)
    set_target_address(target, write_barrier_mode, icache_flush_mode);
}

Address RelocInfo::target_off_heap_target() {
  DCHECK(IsOffHeapTarget(rmode_));
  return Assembler::target_address_at(pc_, constant_pool_);
}

void RelocInfo::WipeOut() {
  DCHECK(IsEmbeddedObject(rmode_) || IsCodeTarget(rmode_) ||
         IsRuntimeEntry(rmode_) || IsExternalReference(rmode_) ||
         IsInternalReference(rmode_) || IsOffHeapTarget(rmode_));
  if (IsInternalReference(rmode_)) {
    Memory<Address>(pc_) = kNullAddress;
  } else {
    Assembler::set_target_address_at(pc_, constant_pool_, kNullAddress);
  }
}

Handle<Code> Assembler::relative_code_target_object_handle_at(
    Address pc) const {
  Instruction* branch = Instruction::At(pc);
  int code_target_index = branch->GetBranchOffset() / kInstrSize;
  return GetCodeTarget(code_target_index);
}

template <typename ObjectVisitor>
void RelocInfo::Visit(ObjectVisitor* visitor) {
  RelocInfo::Mode mode = rmode();
  if (mode == RelocInfo::EMBEDDED_OBJECT) {
    visitor->VisitEmbeddedPointer(host(), this);
  } else if (RelocInfo::IsCodeTargetMode(mode)) {
    visitor->VisitCodeTarget(host(), this);
  } else if (mode == RelocInfo::EXTERNAL_REFERENCE) {
    visitor->VisitExternalReference(host(), this);
  } else if (mode == RelocInfo::INTERNAL_REFERENCE) {
    visitor->VisitInternalReference(host(), this);
  } else if (RelocInfo::IsRuntimeEntry(mode)) {
    visitor->VisitRuntimeEntry(host(), this);
  } else if (RelocInfo::IsOffHeapTarget(mode)) {
    visitor->VisitOffHeapTarget(host(), this);
  }
}

Operand::Operand(int32_t immediate, RelocInfo::Mode rmode) : rmode_(rmode) {
  value_.immediate = immediate;
}

Operand Operand::Zero() { return Operand(static_cast<int32_t>(0)); }

Operand::Operand(const ExternalReference& f)
    : rmode_(RelocInfo::EXTERNAL_REFERENCE) {
  value_.immediate = static_cast<int32_t>(f.address());
}

Operand::Operand(Smi value) : rmode_(RelocInfo::NONE) {
  value_.immediate = static_cast<intptr_t>(value.ptr());
}

Operand::Operand(Register rm) : rm_(rm), shift_op_(LSL), shift_imm_(0) {}

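// CheckBuffer() grows the instruction buffer when no more than kGap bytes of
// headroom remain and gives the pending constant pool a chance to be emitted;
// emit() relies on this before writing each 4-byte instruction.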
void Assembler::CheckBuffer() {
  if (buffer_space() <= kGap) {
    GrowBuffer();
  }
  MaybeCheckConstPool();
}

void Assembler::emit(Instr x) {
  CheckBuffer();
  *reinterpret_cast<Instr*>(pc_) = x;
  pc_ += kInstrSize;
}

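// The two helpers below map between a call's return address and the start of
// the load that materialized the call target. Three call sequences are
// recognized (register names omitted):
//   ldr, blx                 - constant pool load, 2 instructions
//   movw, movt, blx          - ARMv7 immediate load, 3 instructions
//   mov, orr, orr, orr, blx  - pre-ARMv7 immediate load, 5 instructions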
Address Assembler::target_address_from_return_address(Address pc) {
  Address candidate = pc - 2 * kInstrSize;
  Instr candidate_instr(Memory<int32_t>(candidate));
  if (IsLdrPcImmediateOffset(candidate_instr)) {
    return candidate;
  } else {
    if (CpuFeatures::IsSupported(ARMv7)) {
      candidate -= 1 * kInstrSize;
      DCHECK(IsMovW(Memory<int32_t>(candidate)) &&
             IsMovT(Memory<int32_t>(candidate + kInstrSize)));
    } else {
      candidate -= 3 * kInstrSize;
      DCHECK(IsMovImmed(Memory<int32_t>(candidate)) &&
             IsOrrImmed(Memory<int32_t>(candidate + kInstrSize)) &&
             IsOrrImmed(Memory<int32_t>(candidate + 2 * kInstrSize)) &&
             IsOrrImmed(Memory<int32_t>(candidate + 3 * kInstrSize)));
    }
    return candidate;
  }
}

Address Assembler::return_address_from_call_start(Address pc) {
  if (IsLdrPcImmediateOffset(Memory<int32_t>(pc))) {
    // Load from constant pool.
    return pc + kInstrSize * 2;
  } else {
    if (CpuFeatures::IsSupported(ARMv7)) {
      // A movw / movt load immediate.
      DCHECK(IsMovW(Memory<int32_t>(pc)));
      DCHECK(IsMovT(Memory<int32_t>(pc + kInstrSize)));
      return pc + kInstrSize * 3;
    } else {
      // A mov / orr load immediate.
      DCHECK(IsMovImmed(Memory<int32_t>(pc)));
      DCHECK(IsOrrImmed(Memory<int32_t>(pc + kInstrSize)));
      DCHECK(IsOrrImmed(Memory<int32_t>(pc + 2 * kInstrSize)));
      DCHECK(IsOrrImmed(Memory<int32_t>(pc + 3 * kInstrSize)));
      return pc + kInstrSize * 5;
    }
  }
}

void Assembler::deserialization_set_special_target_at(
    Address constant_pool_entry, Code code, Address target) {
  Memory<Address>(constant_pool_entry) = target;
}

int Assembler::deserialization_special_target_size(Address location) {
  return kSpecialTargetSize;
}

void Assembler::deserialization_set_target_internal_reference_at(
    Address pc, Address target, RelocInfo::Mode mode) {
  Memory<Address>(pc) = target;
}

bool Assembler::is_constant_pool_load(Address pc) {
  return IsLdrPcImmediateOffset(Memory<int32_t>(pc));
}

Address Assembler::constant_pool_entry_address(Address pc,
                                               Address constant_pool) {
  DCHECK(Assembler::IsLdrPcImmediateOffset(Memory<int32_t>(pc)));
  Instr instr = Memory<int32_t>(pc);
  return pc + GetLdrRegisterImmediateOffset(instr) + Instruction::kPcLoadDelta;
}

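// target_address_at() decodes the target of a patchable load or branch at
// `pc`: a pc-relative ldr reads the value out of the constant pool, a
// movw/movt or mov/orr sequence reassembles the immediate from the
// instructions, and anything else is treated as a pc-relative branch.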
Address Assembler::target_address_at(Address pc, Address constant_pool) {
  if (is_constant_pool_load(pc)) {
    // This is a constant pool lookup. Return the value in the constant pool.
    return Memory<Address>(constant_pool_entry_address(pc, constant_pool));
  } else if (CpuFeatures::IsSupported(ARMv7) && IsMovW(Memory<int32_t>(pc))) {
    // This is a movw / movt immediate load. Return the immediate.
    DCHECK(IsMovW(Memory<int32_t>(pc)) &&
           IsMovT(Memory<int32_t>(pc + kInstrSize)));
    Instruction* movw_instr = Instruction::At(pc);
    Instruction* movt_instr = Instruction::At(pc + kInstrSize);
    return static_cast<Address>((movt_instr->ImmedMovwMovtValue() << 16) |
                                movw_instr->ImmedMovwMovtValue());
  } else if (IsMovImmed(Memory<int32_t>(pc))) {
    // This is a mov / orr immediate load. Return the immediate.
    DCHECK(IsMovImmed(Memory<int32_t>(pc)) &&
           IsOrrImmed(Memory<int32_t>(pc + kInstrSize)) &&
           IsOrrImmed(Memory<int32_t>(pc + 2 * kInstrSize)) &&
           IsOrrImmed(Memory<int32_t>(pc + 3 * kInstrSize)));
    Instr mov_instr = instr_at(pc);
    Instr orr_instr_1 = instr_at(pc + kInstrSize);
    Instr orr_instr_2 = instr_at(pc + 2 * kInstrSize);
    Instr orr_instr_3 = instr_at(pc + 3 * kInstrSize);
    Address ret = static_cast<Address>(
        DecodeShiftImm(mov_instr) | DecodeShiftImm(orr_instr_1) |
        DecodeShiftImm(orr_instr_2) | DecodeShiftImm(orr_instr_3));
    return ret;
  } else {
    // This is a branch; compute the target from the pc-relative offset.
    Instruction* branch = Instruction::At(pc);
    int32_t delta = branch->GetBranchOffset();
    return pc + delta + Instruction::kPcLoadDelta;
  }
}

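// set_target_address_at() is the patching counterpart of target_address_at():
// constant pool entries are simply overwritten (no icache flush is needed,
// since no instruction changes), while immediate loads and branches are
// rewritten in place and flushed from the instruction cache unless the caller
// asks to skip the flush.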
void Assembler::set_target_address_at(Address pc, Address constant_pool,
                                      Address target,
                                      ICacheFlushMode icache_flush_mode) {
  if (is_constant_pool_load(pc)) {
    // This is a constant pool lookup. Update the entry in the constant pool;
    // the ldr instruction itself is unchanged.
    Memory<Address>(constant_pool_entry_address(pc, constant_pool)) = target;
  } else if (CpuFeatures::IsSupported(ARMv7) && IsMovW(Memory<int32_t>(pc))) {
    // This is a movw / movt immediate load. Patch the two 16-bit halves.
    DCHECK(IsMovW(Memory<int32_t>(pc)));
    DCHECK(IsMovT(Memory<int32_t>(pc + kInstrSize)));
    uint32_t* instr_ptr = reinterpret_cast<uint32_t*>(pc);
    uint32_t immediate = static_cast<uint32_t>(target);
    instr_ptr[0] = PatchMovwImmediate(instr_ptr[0], immediate & 0xFFFF);
    instr_ptr[1] = PatchMovwImmediate(instr_ptr[1], immediate >> 16);
    DCHECK(IsMovW(Memory<int32_t>(pc)));
    DCHECK(IsMovT(Memory<int32_t>(pc + kInstrSize)));
    if (icache_flush_mode != SKIP_ICACHE_FLUSH) {
      Assembler::FlushICache(pc, 2 * kInstrSize);
    }
  } else if (IsMovImmed(Memory<int32_t>(pc))) {
    // This is a mov / orr immediate load. Patch the four 8-bit chunks.
    DCHECK(IsMovImmed(Memory<int32_t>(pc)) &&
           IsOrrImmed(Memory<int32_t>(pc + kInstrSize)) &&
           IsOrrImmed(Memory<int32_t>(pc + 2 * kInstrSize)) &&
           IsOrrImmed(Memory<int32_t>(pc + 3 * kInstrSize)));
    uint32_t* instr_ptr = reinterpret_cast<uint32_t*>(pc);
    uint32_t immediate = static_cast<uint32_t>(target);
    instr_ptr[0] = PatchShiftImm(instr_ptr[0], immediate & kImm8Mask);
    instr_ptr[1] = PatchShiftImm(instr_ptr[1], immediate & (kImm8Mask << 8));
    instr_ptr[2] = PatchShiftImm(instr_ptr[2], immediate & (kImm8Mask << 16));
    instr_ptr[3] = PatchShiftImm(instr_ptr[3], immediate & (kImm8Mask << 24));
    DCHECK(IsMovImmed(Memory<int32_t>(pc)) &&
           IsOrrImmed(Memory<int32_t>(pc + kInstrSize)) &&
           IsOrrImmed(Memory<int32_t>(pc + 2 * kInstrSize)) &&
           IsOrrImmed(Memory<int32_t>(pc + 3 * kInstrSize)));
    if (icache_flush_mode != SKIP_ICACHE_FLUSH) {
      Assembler::FlushICache(pc, 4 * kInstrSize);
    }
  } else {
    // This is a branch; rewrite the pc-relative offset in place.
    intptr_t branch_offset = target - pc - Instruction::kPcLoadDelta;
    Instruction* branch = Instruction::At(pc);
    branch->SetBranchOffset(branch_offset);
    if (icache_flush_mode != SKIP_ICACHE_FLUSH) {
      Assembler::FlushICache(pc, kInstrSize);
    }
  }
}

EnsureSpace::EnsureSpace(Assembler* assembler) { assembler->CheckBuffer(); }

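// The two templates below back the scratch VFP register helpers of
// UseScratchRegisterScope. A hypothetical use (sketch only, assuming a
// DwVfpRegister instantiation) would look roughly like:
//   UseScratchRegisterScope temps(assembler);
//   if (temps.CanAcquireVfp<DwVfpRegister>()) {
//     DwVfpRegister scratch = temps.AcquireVfp<DwVfpRegister>();
//     // ... use scratch; the register becomes available again when temps
//     // goes out of scope.
//   }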
template <typename T>
bool UseScratchRegisterScope::CanAcquireVfp() const {
  VfpRegList* available = assembler_->GetScratchVfpRegisterList();
  DCHECK_NOT_NULL(available);
  for (int index = 0; index < T::kNumRegisters; index++) {
    T reg = T::from_code(index);
    uint64_t mask = reg.ToVfpRegList();
    if ((*available & mask) == mask) {
      return true;
    }
  }
  return false;
}

template <typename T>
T UseScratchRegisterScope::AcquireVfp() {
  VfpRegList* available = assembler_->GetScratchVfpRegisterList();
  DCHECK_NOT_NULL(available);
  for (int index = 0; index < T::kNumRegisters; index++) {
    T reg = T::from_code(index);
    uint64_t mask = reg.ToVfpRegList();
    if ((*available & mask) == mask) {
      *available &= ~mask;  // Claim the register so nested scopes skip it.
      return reg;
    }
  }
  UNREACHABLE();
}

}  // namespace internal
}  // namespace v8

#endif  // V8_ARM_ASSEMBLER_ARM_INL_H_