V8 API Reference, 7.2.502.16 (for Deno 0.2.4)
assembler-arm-inl.h
1 // Copyright (c) 1994-2006 Sun Microsystems Inc.
2 // All Rights Reserved.
3 //
4 // Redistribution and use in source and binary forms, with or without
5 // modification, are permitted provided that the following conditions
6 // are met:
7 //
8 // - Redistributions of source code must retain the above copyright notice,
9 // this list of conditions and the following disclaimer.
10 //
11 // - Redistribution in binary form must reproduce the above copyright
12 // notice, this list of conditions and the following disclaimer in the
13 // documentation and/or other materials provided with the
14 // distribution.
15 //
16 // - Neither the name of Sun Microsystems or the names of contributors may
17 // be used to endorse or promote products derived from this software without
18 // specific prior written permission.
19 //
20 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
21 // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
22 // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
23 // FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
24 // COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
25 // INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
26 // (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
27 // SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
28 // HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
29 // STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
30 // ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED
31 // OF THE POSSIBILITY OF SUCH DAMAGE.
32 
33 // The original source code covered by the above license above has been modified
34 // significantly by Google Inc.
35 // Copyright 2012 the V8 project authors. All rights reserved.
36 
37 #ifndef V8_ARM_ASSEMBLER_ARM_INL_H_
38 #define V8_ARM_ASSEMBLER_ARM_INL_H_
39 
40 #include "src/arm/assembler-arm.h"
41 
42 #include "src/assembler.h"
43 #include "src/debug/debug.h"
44 #include "src/objects-inl.h"
45 #include "src/objects/smi.h"
46 
47 namespace v8 {
48 namespace internal {
49 
50 bool CpuFeatures::SupportsOptimizer() { return true; }
51 
52 bool CpuFeatures::SupportsWasmSimd128() { return IsSupported(NEON); }
53 
54 int DoubleRegister::NumRegisters() {
55  return CpuFeatures::IsSupported(VFP32DREGS) ? 32 : 16;
56 }
57 
58 
// Relocates the entry at pc_ by |delta| bytes after the enclosing code
// object has moved. Only internal references and relative code targets
// need fixing; all other modes are position-independent here.
void RelocInfo::apply(intptr_t delta) {
  if (RelocInfo::IsInternalReference(rmode_)) {
    // absolute code pointer inside code object moves with the code object.
    int32_t* p = reinterpret_cast<int32_t*>(pc_);
    *p += delta;  // relocate entry
  } else if (RelocInfo::IsRelativeCodeTarget(rmode_)) {
    // A PC-relative branch: adjust the encoded offset so it still reaches
    // the target after the branch instruction itself moved by |delta|.
    Instruction* branch = Instruction::At(pc_);
    int32_t branch_offset = branch->GetBranchOffset() + delta;
    branch->SetBranchOffset(branch_offset);
  }
}
70 
71 
// Returns the absolute target address encoded at pc_ (code target,
// runtime entry, or wasm call).
Address RelocInfo::target_address() {
  DCHECK(IsCodeTargetMode(rmode_) || IsRuntimeEntry(rmode_) ||
         IsWasmCall(rmode_));
  return Assembler::target_address_at(pc_, constant_pool_);
}
77 
// Returns the address of the memory that holds the target: for a
// movw/movt pair the target is embedded in the instructions at pc_
// itself; otherwise it lives in the constant pool entry that the ldr at
// pc_ references.
Address RelocInfo::target_address_address() {
  DCHECK(IsCodeTarget(rmode_) || IsRuntimeEntry(rmode_) || IsWasmCall(rmode_) ||
         IsEmbeddedObject(rmode_) || IsExternalReference(rmode_) ||
         IsOffHeapTarget(rmode_));
  if (Assembler::IsMovW(Memory<int32_t>(pc_))) {
    return pc_;
  } else {
    // Must be a pc-relative constant pool load.
    DCHECK(Assembler::IsLdrPcImmediateOffset(Memory<int32_t>(pc_)));
    return constant_pool_entry_address();
  }
}
89 
90 
// Address of the constant pool slot referenced by the load at pc_.
Address RelocInfo::constant_pool_entry_address() {
  DCHECK(IsInConstantPool());
  return Assembler::constant_pool_entry_address(pc_, constant_pool_);
}
95 
96 
97 int RelocInfo::target_address_size() {
98  return kPointerSize;
99 }
100 
// Returns the heap object referenced at pc_, decoding the raw address
// stored there.
HeapObject* RelocInfo::target_object() {
  DCHECK(IsCodeTarget(rmode_) || rmode_ == EMBEDDED_OBJECT);
  return HeapObject::cast(reinterpret_cast<Object*>(
      Assembler::target_address_at(pc_, constant_pool_)));
}
106 
// Returns a handle for the target object. For relative code targets the
// handle comes from the origin assembler's code-target table rather than
// from the address encoded at pc_.
Handle<HeapObject> RelocInfo::target_object_handle(Assembler* origin) {
  if (IsCodeTarget(rmode_) || rmode_ == EMBEDDED_OBJECT) {
    return Handle<HeapObject>(reinterpret_cast<Address*>(
        Assembler::target_address_at(pc_, constant_pool_)));
  }
  DCHECK(IsRelativeCodeTarget(rmode_));
  return origin->relative_code_target_object_handle_at(pc_);
}
115 
// Stores |target| as the referenced object at pc_, optionally flushing
// the icache and notifying the write barrier (needed for GC correctness
// when the host code object is tracked).
void RelocInfo::set_target_object(Heap* heap, HeapObject* target,
                                  WriteBarrierMode write_barrier_mode,
                                  ICacheFlushMode icache_flush_mode) {
  DCHECK(IsCodeTarget(rmode_) || rmode_ == EMBEDDED_OBJECT);
  Assembler::set_target_address_at(pc_, constant_pool_,
                                   reinterpret_cast<Address>(target),
                                   icache_flush_mode);
  if (write_barrier_mode == UPDATE_WRITE_BARRIER && host() != nullptr) {
    WriteBarrierForCode(host(), this, target);
  }
}
127 
128 
// Reads the external reference (e.g. C function or global) encoded at pc_.
Address RelocInfo::target_external_reference() {
  DCHECK(rmode_ == EXTERNAL_REFERENCE);
  return Assembler::target_address_at(pc_, constant_pool_);
}
133 
// Patches the external reference encoded at pc_ to |target|.
void RelocInfo::set_target_external_reference(
    Address target, ICacheFlushMode icache_flush_mode) {
  DCHECK(rmode_ == RelocInfo::EXTERNAL_REFERENCE);
  Assembler::set_target_address_at(pc_, constant_pool_, target,
                                   icache_flush_mode);
}
140 
// Internal references are stored as absolute addresses directly at pc_.
Address RelocInfo::target_internal_reference() {
  DCHECK(rmode_ == INTERNAL_REFERENCE);
  return Memory<Address>(pc_);
}
145 
146 
// Location of the internal reference word itself (pc_ holds the value).
Address RelocInfo::target_internal_reference_address() {
  DCHECK(rmode_ == INTERNAL_REFERENCE);
  return pc_;
}
151 
// Runtime entries are encoded exactly like ordinary target addresses;
// |origin| is unused on ARM.
Address RelocInfo::target_runtime_entry(Assembler* origin) {
  DCHECK(IsRuntimeEntry(rmode_));
  return target_address();
}
156 
157 void RelocInfo::set_target_runtime_entry(Address target,
158  WriteBarrierMode write_barrier_mode,
159  ICacheFlushMode icache_flush_mode) {
160  DCHECK(IsRuntimeEntry(rmode_));
161  if (target_address() != target)
162  set_target_address(target, write_barrier_mode, icache_flush_mode);
163 }
164 
// Reads the off-heap (embedded builtin) target address encoded at pc_.
Address RelocInfo::target_off_heap_target() {
  DCHECK(IsOffHeapTarget(rmode_));
  return Assembler::target_address_at(pc_, constant_pool_);
}
169 
// Clears the encoded target to kNullAddress, e.g. when serializing or
// zapping code. Internal references are stored inline at pc_ and are
// overwritten directly; everything else goes through the patcher.
void RelocInfo::WipeOut() {
  DCHECK(IsEmbeddedObject(rmode_) || IsCodeTarget(rmode_) ||
         IsRuntimeEntry(rmode_) || IsExternalReference(rmode_) ||
         IsInternalReference(rmode_) || IsOffHeapTarget(rmode_));
  if (IsInternalReference(rmode_)) {
    Memory<Address>(pc_) = kNullAddress;
  } else {
    Assembler::set_target_address_at(pc_, constant_pool_, kNullAddress);
  }
}
180 
// For a relative branch at |pc|, the branch offset (in instructions)
// doubles as an index into this assembler's code-target table; returns
// the Code handle stored at that index.
Handle<Code> Assembler::relative_code_target_object_handle_at(
    Address pc) const {
  Instruction* branch = Instruction::At(pc);
  int code_target_index = branch->GetBranchOffset() / kInstrSize;
  return GetCodeTarget(code_target_index);
}
187 
// Dispatches this reloc entry to the matching visitor callback based on
// its mode. Entries with modes not listed here are ignored.
template <typename ObjectVisitor>
void RelocInfo::Visit(ObjectVisitor* visitor) {
  RelocInfo::Mode mode = rmode();
  if (mode == RelocInfo::EMBEDDED_OBJECT) {
    visitor->VisitEmbeddedPointer(host(), this);
  } else if (RelocInfo::IsCodeTargetMode(mode)) {
    visitor->VisitCodeTarget(host(), this);
  } else if (mode == RelocInfo::EXTERNAL_REFERENCE) {
    visitor->VisitExternalReference(host(), this);
  } else if (mode == RelocInfo::INTERNAL_REFERENCE) {
    visitor->VisitInternalReference(host(), this);
  } else if (RelocInfo::IsRuntimeEntry(mode)) {
    visitor->VisitRuntimeEntry(host(), this);
  } else if (RelocInfo::IsOffHeapTarget(mode)) {
    visitor->VisitOffHeapTarget(host(), this);
  }
}
205 
// Immediate operand with an explicit relocation mode (defaulted in the
// declaration).
Operand::Operand(int32_t immediate, RelocInfo::Mode rmode) : rmode_(rmode) {
  value_.immediate = immediate;
}
209 
210 Operand Operand::Zero() { return Operand(static_cast<int32_t>(0)); }
211 
// Immediate operand holding an external reference's address; tagged with
// EXTERNAL_REFERENCE so the serializer can relocate it.
Operand::Operand(const ExternalReference& f)
    : rmode_(RelocInfo::EXTERNAL_REFERENCE) {
  value_.immediate = static_cast<int32_t>(f.address());
}
216 
// Immediate operand holding a Smi's raw (tagged) bits; needs no
// relocation since Smis are not heap pointers.
Operand::Operand(Smi value) : rmode_(RelocInfo::NONE) {
  value_.immediate = static_cast<intptr_t>(value.ptr());
}
220 
// Plain register operand: |rm| with no shift applied (LSL #0).
Operand::Operand(Register rm) : rm_(rm), shift_op_(LSL), shift_imm_(0) {}
222 
// Ensures there is room to emit the next instruction: grows the buffer
// when fewer than kGap bytes remain, then gives the constant pool a
// chance to be flushed if it is due.
void Assembler::CheckBuffer() {
  if (buffer_space() <= kGap) {
    GrowBuffer();
  }
  MaybeCheckConstPool();
}
229 
230 
// Writes one 32-bit instruction word at the current position and
// advances pc_.
void Assembler::emit(Instr x) {
  CheckBuffer();  // May grow the buffer and/or emit the constant pool.
  *reinterpret_cast<Instr*>(pc_) = x;
  pc_ += kInstrSize;
}
236 
237 
// Walks backwards from a call's return address to the start of the
// instruction sequence that loaded the call target. The sequence length
// differs per encoding, so the candidate is probed pattern by pattern.
Address Assembler::target_address_from_return_address(Address pc) {
  // Returns the address of the call target from the return address that will
  // be returned to after a call.
  // Call sequence on V7 or later is:
  //  movw  ip, #... @ call address low 16
  //  movt  ip, #... @ call address high 16
  //  blx   ip
  //                 @ return address
  // For V6 when the constant pool is unavailable, it is:
  //  mov  ip, #...     @ call address low 8
  //  orr  ip, ip, #... @ call address 2nd 8
  //  orr  ip, ip, #... @ call address 3rd 8
  //  orr  ip, ip, #... @ call address high 8
  //  blx  ip
  //                    @ return address
  // In cases that need frequent patching, the address is in the
  // constant pool.  It could be a small constant pool load:
  //  ldr  ip, [pc, #...] @ call address
  //  blx  ip
  //                      @ return address
  // First guess: a 2-instruction sequence (ldr + blx).
  Address candidate = pc - 2 * kInstrSize;
  Instr candidate_instr(Memory<int32_t>(candidate));
  if (IsLdrPcImmediateOffset(candidate_instr)) {
    return candidate;
  } else {
    if (CpuFeatures::IsSupported(ARMv7)) {
      // 3-instruction sequence: movw + movt + blx.
      candidate -= 1 * kInstrSize;
      DCHECK(IsMovW(Memory<int32_t>(candidate)) &&
             IsMovT(Memory<int32_t>(candidate + kInstrSize)));
    } else {
      // 5-instruction sequence: mov + 3*orr + blx.
      candidate -= 3 * kInstrSize;
      DCHECK(IsMovImmed(Memory<int32_t>(candidate)) &&
             IsOrrImmed(Memory<int32_t>(candidate + kInstrSize)) &&
             IsOrrImmed(Memory<int32_t>(candidate + 2 * kInstrSize)) &&
             IsOrrImmed(Memory<int32_t>(candidate + 3 * kInstrSize)));
    }
    return candidate;
  }
}
277 
278 
// Inverse of target_address_from_return_address: given the first
// instruction of a call sequence at |pc|, returns the address following
// the final blx, i.e. where execution resumes after the call.
Address Assembler::return_address_from_call_start(Address pc) {
  if (IsLdrPcImmediateOffset(Memory<int32_t>(pc))) {
    // Load from constant pool, small section: ldr + blx.
    return pc + kInstrSize * 2;
  } else {
    if (CpuFeatures::IsSupported(ARMv7)) {
      DCHECK(IsMovW(Memory<int32_t>(pc)));
      DCHECK(IsMovT(Memory<int32_t>(pc + kInstrSize)));
      // A movw / movt load immediate: movw + movt + blx.
      return pc + kInstrSize * 3;
    } else {
      DCHECK(IsMovImmed(Memory<int32_t>(pc)));
      DCHECK(IsOrrImmed(Memory<int32_t>(pc + kInstrSize)));
      DCHECK(IsOrrImmed(Memory<int32_t>(pc + 2 * kInstrSize)));
      DCHECK(IsOrrImmed(Memory<int32_t>(pc + 3 * kInstrSize)));
      // A mov / orr load immediate: mov + 3*orr + blx.
      return pc + kInstrSize * 5;
    }
  }
}
299 
// During deserialization the constant pool slot is written directly; no
// instructions are patched so no icache flush is needed.
void Assembler::deserialization_set_special_target_at(
    Address constant_pool_entry, Code code, Address target) {
  Memory<Address>(constant_pool_entry) = target;
}
304 
// Size in bytes of a serialized special target; |location| is unused on
// ARM since the size is fixed.
int Assembler::deserialization_special_target_size(Address location) {
  return kSpecialTargetSize;
}
308 
// Internal references are plain absolute words at |pc|; |mode| is unused
// on ARM.
void Assembler::deserialization_set_target_internal_reference_at(
    Address pc, Address target, RelocInfo::Mode mode) {
  Memory<Address>(pc) = target;
}
313 
314 
// True when the instruction at |pc| is a pc-relative ldr, i.e. a load
// from the constant pool.
bool Assembler::is_constant_pool_load(Address pc) {
  return IsLdrPcImmediateOffset(Memory<int32_t>(pc));
}
318 
319 
// Computes the address of the constant pool slot referenced by the
// pc-relative ldr at |pc|. kPcLoadDelta accounts for the ARM pipeline's
// PC-ahead-of-instruction offset; |constant_pool| is unused on ARM.
Address Assembler::constant_pool_entry_address(Address pc,
                                               Address constant_pool) {
  DCHECK(Assembler::IsLdrPcImmediateOffset(Memory<int32_t>(pc)));
  Instr instr = Memory<int32_t>(pc);
  return pc + GetLdrRegisterImmediateOffset(instr) + Instruction::kPcLoadDelta;
}
326 
327 
// Decodes the 32-bit target address stored at |pc|. Handles all four
// shapes the assembler emits: a constant-pool ldr, a movw/movt pair
// (ARMv7), a mov + 3*orr quad (pre-ARMv7), or a pc-relative branch.
Address Assembler::target_address_at(Address pc, Address constant_pool) {
  if (is_constant_pool_load(pc)) {
    // This is a constant pool lookup. Return the value in the constant pool.
    return Memory<Address>(constant_pool_entry_address(pc, constant_pool));
  } else if (CpuFeatures::IsSupported(ARMv7) && IsMovW(Memory<int32_t>(pc))) {
    // This is an movw / movt immediate load. Return the immediate.
    DCHECK(IsMovW(Memory<int32_t>(pc)) &&
           IsMovT(Memory<int32_t>(pc + kInstrSize)));
    Instruction* movw_instr = Instruction::At(pc);
    Instruction* movt_instr = Instruction::At(pc + kInstrSize);
    // movt supplies the high 16 bits, movw the low 16 bits.
    return static_cast<Address>((movt_instr->ImmedMovwMovtValue() << 16) |
                                movw_instr->ImmedMovwMovtValue());
  } else if (IsMovImmed(Memory<int32_t>(pc))) {
    // This is an mov / orr immediate load. Return the immediate.
    DCHECK(IsMovImmed(Memory<int32_t>(pc)) &&
           IsOrrImmed(Memory<int32_t>(pc + kInstrSize)) &&
           IsOrrImmed(Memory<int32_t>(pc + 2 * kInstrSize)) &&
           IsOrrImmed(Memory<int32_t>(pc + 3 * kInstrSize)));
    Instr mov_instr = instr_at(pc);
    Instr orr_instr_1 = instr_at(pc + kInstrSize);
    Instr orr_instr_2 = instr_at(pc + 2 * kInstrSize);
    Instr orr_instr_3 = instr_at(pc + 3 * kInstrSize);
    // Each instruction contributes one byte of the address.
    Address ret = static_cast<Address>(
        DecodeShiftImm(mov_instr) | DecodeShiftImm(orr_instr_1) |
        DecodeShiftImm(orr_instr_2) | DecodeShiftImm(orr_instr_3));
    return ret;
  } else {
    // A pc-relative branch: reconstruct the absolute target address.
    Instruction* branch = Instruction::At(pc);
    int32_t delta = branch->GetBranchOffset();
    return pc + delta + Instruction::kPcLoadDelta;
  }
}
360 
// Patches the target address stored at |pc| to |target|, mirroring the
// four encodings handled by target_address_at. The icache is flushed
// only when instruction bytes actually change (and the caller did not
// request SKIP_ICACHE_FLUSH).
void Assembler::set_target_address_at(Address pc, Address constant_pool,
                                      Address target,
                                      ICacheFlushMode icache_flush_mode) {
  if (is_constant_pool_load(pc)) {
    // This is a constant pool lookup. Update the entry in the constant pool.
    Memory<Address>(constant_pool_entry_address(pc, constant_pool)) = target;
    // Intuitively, we would think it is necessary to always flush the
    // instruction cache after patching a target address in the code as follows:
    //   Assembler::FlushICache(pc, sizeof(target));
    // However, on ARM, no instruction is actually patched in the case
    // of embedded constants of the form:
    //   ldr ip, [pp, #...]
    // since the instruction accessing this address in the constant pool remains
    // unchanged.
  } else if (CpuFeatures::IsSupported(ARMv7) && IsMovW(Memory<int32_t>(pc))) {
    // This is an movw / movt immediate load. Patch the immediate embedded in
    // the instructions.
    DCHECK(IsMovW(Memory<int32_t>(pc)));
    DCHECK(IsMovT(Memory<int32_t>(pc + kInstrSize)));
    uint32_t* instr_ptr = reinterpret_cast<uint32_t*>(pc);
    uint32_t immediate = static_cast<uint32_t>(target);
    instr_ptr[0] = PatchMovwImmediate(instr_ptr[0], immediate & 0xFFFF);
    instr_ptr[1] = PatchMovwImmediate(instr_ptr[1], immediate >> 16);
    // Patching must not change the instruction kinds.
    DCHECK(IsMovW(Memory<int32_t>(pc)));
    DCHECK(IsMovT(Memory<int32_t>(pc + kInstrSize)));
    if (icache_flush_mode != SKIP_ICACHE_FLUSH) {
      Assembler::FlushICache(pc, 2 * kInstrSize);
    }
  } else if (IsMovImmed(Memory<int32_t>(pc))) {
    // This is an mov / orr immediate load. Patch the immediate embedded in
    // the instructions.
    DCHECK(IsMovImmed(Memory<int32_t>(pc)) &&
           IsOrrImmed(Memory<int32_t>(pc + kInstrSize)) &&
           IsOrrImmed(Memory<int32_t>(pc + 2 * kInstrSize)) &&
           IsOrrImmed(Memory<int32_t>(pc + 3 * kInstrSize)));
    uint32_t* instr_ptr = reinterpret_cast<uint32_t*>(pc);
    uint32_t immediate = static_cast<uint32_t>(target);
    // One byte of the address per instruction.
    instr_ptr[0] = PatchShiftImm(instr_ptr[0], immediate & kImm8Mask);
    instr_ptr[1] = PatchShiftImm(instr_ptr[1], immediate & (kImm8Mask << 8));
    instr_ptr[2] = PatchShiftImm(instr_ptr[2], immediate & (kImm8Mask << 16));
    instr_ptr[3] = PatchShiftImm(instr_ptr[3], immediate & (kImm8Mask << 24));
    // Patching must not change the instruction kinds.
    DCHECK(IsMovImmed(Memory<int32_t>(pc)) &&
           IsOrrImmed(Memory<int32_t>(pc + kInstrSize)) &&
           IsOrrImmed(Memory<int32_t>(pc + 2 * kInstrSize)) &&
           IsOrrImmed(Memory<int32_t>(pc + 3 * kInstrSize)));
    if (icache_flush_mode != SKIP_ICACHE_FLUSH) {
      Assembler::FlushICache(pc, 4 * kInstrSize);
    }
  } else {
    // A pc-relative branch: re-encode the offset to reach |target|.
    intptr_t branch_offset = target - pc - Instruction::kPcLoadDelta;
    Instruction* branch = Instruction::At(pc);
    branch->SetBranchOffset(branch_offset);
    if (icache_flush_mode != SKIP_ICACHE_FLUSH) {
      Assembler::FlushICache(pc, kInstrSize);
    }
  }
}
418 
// RAII-style helper: constructing it guarantees the assembler buffer has
// room for the next emission (may grow the buffer / emit the pool).
EnsureSpace::EnsureSpace(Assembler* assembler) { assembler->CheckBuffer(); }
420 
421 template <typename T>
422 bool UseScratchRegisterScope::CanAcquireVfp() const {
423  VfpRegList* available = assembler_->GetScratchVfpRegisterList();
424  DCHECK_NOT_NULL(available);
425  for (int index = 0; index < T::kNumRegisters; index++) {
426  T reg = T::from_code(index);
427  uint64_t mask = reg.ToVfpRegList();
428  if ((*available & mask) == mask) {
429  return true;
430  }
431  }
432  return false;
433 }
434 
435 template <typename T>
436 T UseScratchRegisterScope::AcquireVfp() {
437  VfpRegList* available = assembler_->GetScratchVfpRegisterList();
438  DCHECK_NOT_NULL(available);
439  for (int index = 0; index < T::kNumRegisters; index++) {
440  T reg = T::from_code(index);
441  uint64_t mask = reg.ToVfpRegList();
442  if ((*available & mask) == mask) {
443  *available &= ~mask;
444  return reg;
445  }
446  }
447  UNREACHABLE();
448 }
449 
450 } // namespace internal
451 } // namespace v8
452 
453 #endif // V8_ARM_ASSEMBLER_ARM_INL_H_
Definition: libplatform.h:13