V8 API Reference, 7.2.502.16 (for Deno 0.2.4)
macro-assembler-x64.h
1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
4 
5 #ifndef INCLUDED_FROM_MACRO_ASSEMBLER_H
6 #error This header must be included via macro-assembler.h
7 #endif
8 
9 #ifndef V8_X64_MACRO_ASSEMBLER_X64_H_
10 #define V8_X64_MACRO_ASSEMBLER_X64_H_
11 
12 #include "src/bailout-reason.h"
13 #include "src/base/flags.h"
14 #include "src/contexts.h"
15 #include "src/globals.h"
16 #include "src/x64/assembler-x64.h"
17 
18 namespace v8 {
19 namespace internal {
20 
21 // Give alias names to registers for calling conventions.
22 constexpr Register kReturnRegister0 = rax;
23 constexpr Register kReturnRegister1 = rdx;
24 constexpr Register kReturnRegister2 = r8;
25 constexpr Register kJSFunctionRegister = rdi;
26 constexpr Register kContextRegister = rsi;
27 constexpr Register kAllocateSizeRegister = rdx;
28 constexpr Register kSpeculationPoisonRegister = r12;
29 constexpr Register kInterpreterAccumulatorRegister = rax;
30 constexpr Register kInterpreterBytecodeOffsetRegister = r9;
31 constexpr Register kInterpreterBytecodeArrayRegister = r14;
32 constexpr Register kInterpreterDispatchTableRegister = r15;
33 
34 constexpr Register kJavaScriptCallArgCountRegister = rax;
35 constexpr Register kJavaScriptCallCodeStartRegister = rcx;
36 constexpr Register kJavaScriptCallTargetRegister = kJSFunctionRegister;
37 constexpr Register kJavaScriptCallNewTargetRegister = rdx;
38 constexpr Register kJavaScriptCallExtraArg1Register = rbx;
39 
40 constexpr Register kRuntimeCallFunctionRegister = rbx;
41 constexpr Register kRuntimeCallArgCountRegister = rax;
42 constexpr Register kRuntimeCallArgvRegister = r15;
43 constexpr Register kWasmInstanceRegister = rsi;
44 
45 // Default scratch register used by MacroAssembler (and other code that needs
46 // a spare register). The register isn't callee-saved and is not used by the
47 // function calling convention.
48 constexpr Register kScratchRegister = r10;
49 constexpr XMMRegister kScratchDoubleReg = xmm15;
50 constexpr Register kRootRegister = r13; // callee save
51 
52 constexpr Register kOffHeapTrampolineRegister = kScratchRegister;
53 
54 // Convenience for platform-independent signatures.
55 typedef Operand MemOperand;
56 
57 class StringConstantBase;
58 
59 enum RememberedSetAction { EMIT_REMEMBERED_SET, OMIT_REMEMBERED_SET };
60 enum SmiCheck { INLINE_SMI_CHECK, OMIT_SMI_CHECK };
61 
62 struct SmiIndex {
63  SmiIndex(Register index_register, ScaleFactor scale)
64  : reg(index_register),
65  scale(scale) {}
66  Register reg;
67  ScaleFactor scale;
68 };
69 
70 enum StackArgumentsAccessorReceiverMode {
71  ARGUMENTS_CONTAIN_RECEIVER,
72  ARGUMENTS_DONT_CONTAIN_RECEIVER
73 };
74 
75 class StackArgumentsAccessor {
76  public:
77  StackArgumentsAccessor(Register base_reg, int argument_count_immediate,
78  StackArgumentsAccessorReceiverMode receiver_mode =
79  ARGUMENTS_CONTAIN_RECEIVER,
80  int extra_displacement_to_last_argument = 0)
81  : base_reg_(base_reg),
82  argument_count_reg_(no_reg),
83  argument_count_immediate_(argument_count_immediate),
84  receiver_mode_(receiver_mode),
85  extra_displacement_to_last_argument_(
86  extra_displacement_to_last_argument) {}
87 
88  StackArgumentsAccessor(Register base_reg, Register argument_count_reg,
89  StackArgumentsAccessorReceiverMode receiver_mode =
90  ARGUMENTS_CONTAIN_RECEIVER,
91  int extra_displacement_to_last_argument = 0)
92  : base_reg_(base_reg),
93  argument_count_reg_(argument_count_reg),
94  argument_count_immediate_(0),
95  receiver_mode_(receiver_mode),
96  extra_displacement_to_last_argument_(
97  extra_displacement_to_last_argument) {}
98 
99  StackArgumentsAccessor(Register base_reg,
100  const ParameterCount& parameter_count,
101  StackArgumentsAccessorReceiverMode receiver_mode =
102  ARGUMENTS_CONTAIN_RECEIVER,
103  int extra_displacement_to_last_argument = 0);
104 
105  Operand GetArgumentOperand(int index);
106  Operand GetReceiverOperand() {
107  DCHECK(receiver_mode_ == ARGUMENTS_CONTAIN_RECEIVER);
108  return GetArgumentOperand(0);
109  }
110 
111  private:
112  const Register base_reg_;
113  const Register argument_count_reg_;
114  const int argument_count_immediate_;
115  const StackArgumentsAccessorReceiverMode receiver_mode_;
116  const int extra_displacement_to_last_argument_;
117 
118  DISALLOW_IMPLICIT_CONSTRUCTORS(StackArgumentsAccessor);
119 };
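// Usage sketch (editor's illustration, not part of the original header): the
// accessor builds Operands for stack-passed arguments relative to a base
// register such as rbp. Assuming a MacroAssembler* masm:
//
//   StackArgumentsAccessor args(rbp, 2);         // immediate argument count
//   masm->movp(rax, args.GetReceiverOperand());  // same as GetArgumentOperand(0)
//   masm->movp(rbx, args.GetArgumentOperand(1));
//
// GetReceiverOperand() is only valid with ARGUMENTS_CONTAIN_RECEIVER, the
// default receiver mode.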
120 
121 class V8_EXPORT_PRIVATE TurboAssembler : public TurboAssemblerBase {
122  public:
123  TurboAssembler(const AssemblerOptions& options, void* buffer, int buffer_size)
124  : TurboAssemblerBase(options, buffer, buffer_size) {}
125 
126  TurboAssembler(Isolate* isolate, const AssemblerOptions& options,
127  void* buffer, int buffer_size,
128  CodeObjectRequired create_code_object)
129  : TurboAssemblerBase(isolate, options, buffer, buffer_size,
130  create_code_object) {}
131 
132  template <typename Dst, typename... Args>
133  struct AvxHelper {
134  Assembler* assm;
135  // Call a method where the AVX version expects the dst argument to be
136  // duplicated.
137  template <void (Assembler::*avx)(Dst, Dst, Args...),
138  void (Assembler::*no_avx)(Dst, Args...)>
139  void emit(Dst dst, Args... args) {
140  if (CpuFeatures::IsSupported(AVX)) {
141  CpuFeatureScope scope(assm, AVX);
142  (assm->*avx)(dst, dst, args...);
143  } else {
144  (assm->*no_avx)(dst, args...);
145  }
146  }
147 
148  // Call a method where the AVX version expects no duplicated dst argument.
149  template <void (Assembler::*avx)(Dst, Args...),
150  void (Assembler::*no_avx)(Dst, Args...)>
151  void emit(Dst dst, Args... args) {
152  if (CpuFeatures::IsSupported(AVX)) {
153  CpuFeatureScope scope(assm, AVX);
154  (assm->*avx)(dst, args...);
155  } else {
156  (assm->*no_avx)(dst, args...);
157  }
158  }
159  };
160 
161 #define AVX_OP(macro_name, name) \
162  template <typename Dst, typename... Args> \
163  void macro_name(Dst dst, Args... args) { \
164  AvxHelper<Dst, Args...>{this} \
165  .template emit<&Assembler::v##name, &Assembler::name>(dst, args...); \
166  }
167 
168  AVX_OP(Subsd, subsd)
169  AVX_OP(Divss, divss)
170  AVX_OP(Divsd, divsd)
171  AVX_OP(Xorps, xorps)
172  AVX_OP(Xorpd, xorpd)
173  AVX_OP(Movd, movd)
174  AVX_OP(Movq, movq)
175  AVX_OP(Movaps, movaps)
176  AVX_OP(Movapd, movapd)
177  AVX_OP(Movups, movups)
178  AVX_OP(Movmskps, movmskps)
179  AVX_OP(Movmskpd, movmskpd)
180  AVX_OP(Movss, movss)
181  AVX_OP(Movsd, movsd)
182  AVX_OP(Pcmpeqd, pcmpeqd)
183  AVX_OP(Pslld, pslld)
184  AVX_OP(Psllq, psllq)
185  AVX_OP(Psrld, psrld)
186  AVX_OP(Psrlq, psrlq)
187  AVX_OP(Addsd, addsd)
188  AVX_OP(Mulsd, mulsd)
189  AVX_OP(Andps, andps)
190  AVX_OP(Andpd, andpd)
191  AVX_OP(Orpd, orpd)
192  AVX_OP(Cmpeqps, cmpeqps)
193  AVX_OP(Cmpltps, cmpltps)
194  AVX_OP(Cmpleps, cmpleps)
195  AVX_OP(Cmpneqps, cmpneqps)
196  AVX_OP(Cmpnltps, cmpnltps)
197  AVX_OP(Cmpnleps, cmpnleps)
198  AVX_OP(Cmpeqpd, cmpeqpd)
199  AVX_OP(Cmpltpd, cmpltpd)
200  AVX_OP(Cmplepd, cmplepd)
201  AVX_OP(Cmpneqpd, cmpneqpd)
202  AVX_OP(Cmpnltpd, cmpnltpd)
203  AVX_OP(Cmpnlepd, cmpnlepd)
204  AVX_OP(Roundss, roundss)
205  AVX_OP(Roundsd, roundsd)
206  AVX_OP(Sqrtss, sqrtss)
207  AVX_OP(Sqrtsd, sqrtsd)
208  AVX_OP(Ucomiss, ucomiss)
209  AVX_OP(Ucomisd, ucomisd)
210 
211 #undef AVX_OP
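// Note (editor's illustration, not part of the original header): each wrapper
// generated by AVX_OP emits the VEX-encoded form when AVX is available and
// falls back to the legacy SSE encoding otherwise. For example, assuming a
// TurboAssembler* tasm:
//
//   tasm->Addsd(xmm0, xmm1);  // vaddsd xmm0, xmm0, xmm1 under AVX,
//                             // addsd xmm0, xmm1 otherwise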
212 
213  void PushReturnAddressFrom(Register src) { pushq(src); }
214  void PopReturnAddressTo(Register dst) { popq(dst); }
215 
216  void Ret();
217 
218  // Return and drop arguments from stack, where the number of arguments
219  // may be bigger than 2^16 - 1. Requires a scratch register.
220  void Ret(int bytes_dropped, Register scratch);
221 
222  // Load a register with a long value as efficiently as possible.
223  void Set(Register dst, int64_t x);
224  void Set(Operand dst, intptr_t x);
225 
226  // Operations on roots in the root-array.
227  void LoadRoot(Register destination, RootIndex index) override;
228  void LoadRoot(Operand destination, RootIndex index) {
229  LoadRoot(kScratchRegister, index);
230  movp(destination, kScratchRegister);
231  }
232 
233  void Push(Register src);
234  void Push(Operand src);
235  void Push(Immediate value);
236  void Push(Smi smi);
237  void Push(Handle<HeapObject> source);
238 
239  // Before calling a C-function from generated code, align arguments on stack.
240  // After aligning the frame, arguments must be stored in rsp[0], rsp[8],
241  // etc., not pushed. The argument count assumes all arguments are word sized.
242  // The number of slots reserved for arguments depends on platform. On Windows
243  // stack slots are reserved for the arguments passed in registers. On other
244  // platforms stack slots are only reserved for the arguments actually passed
245  // on the stack.
246  void PrepareCallCFunction(int num_arguments);
247 
248  // Calls a C function and cleans up the space for arguments allocated
249  // by PrepareCallCFunction. The called function is not allowed to trigger a
250  // garbage collection, since that might move the code and invalidate the
251  // return address (unless this is somehow accounted for by the called
252  // function).
253  void CallCFunction(ExternalReference function, int num_arguments);
254  void CallCFunction(Register function, int num_arguments);
255 
256  // Calculate the number of stack slots to reserve for arguments when calling a
257  // C function.
258  int ArgumentStackSlotsForCFunctionCall(int num_arguments);
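// Usage sketch (editor's illustration, not part of the original header): a
// typical C call reserves the argument area, places the arguments in the C
// calling-convention registers and/or the reserved slots at rsp[0], rsp[8],
// ..., and then calls through an ExternalReference. Assuming a
// MacroAssembler* masm and a hypothetical ExternalReference ref to a
// two-argument C function:
//
//   masm->PrepareCallCFunction(2);
//   // ... store the two arguments per the platform calling convention ...
//   masm->CallCFunction(ref, 2);  // performs the call and releases the slots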
259 
260  void CheckPageFlag(Register object, Register scratch, int mask, Condition cc,
261  Label* condition_met,
262  Label::Distance condition_met_distance = Label::kFar);
263 
264  void Cvtss2sd(XMMRegister dst, XMMRegister src);
265  void Cvtss2sd(XMMRegister dst, Operand src);
266  void Cvtsd2ss(XMMRegister dst, XMMRegister src);
267  void Cvtsd2ss(XMMRegister dst, Operand src);
268  void Cvttsd2si(Register dst, XMMRegister src);
269  void Cvttsd2si(Register dst, Operand src);
270  void Cvttsd2siq(Register dst, XMMRegister src);
271  void Cvttsd2siq(Register dst, Operand src);
272  void Cvttss2si(Register dst, XMMRegister src);
273  void Cvttss2si(Register dst, Operand src);
274  void Cvttss2siq(Register dst, XMMRegister src);
275  void Cvttss2siq(Register dst, Operand src);
276  void Cvtqsi2ss(XMMRegister dst, Register src);
277  void Cvtqsi2ss(XMMRegister dst, Operand src);
278  void Cvtqsi2sd(XMMRegister dst, Register src);
279  void Cvtqsi2sd(XMMRegister dst, Operand src);
280  void Cvtlsi2ss(XMMRegister dst, Register src);
281  void Cvtlsi2ss(XMMRegister dst, Operand src);
282  void Cvtlui2ss(XMMRegister dst, Register src);
283  void Cvtlui2ss(XMMRegister dst, Operand src);
284  void Cvtlui2sd(XMMRegister dst, Register src);
285  void Cvtlui2sd(XMMRegister dst, Operand src);
286  void Cvtqui2ss(XMMRegister dst, Register src);
287  void Cvtqui2ss(XMMRegister dst, Operand src);
288  void Cvtqui2sd(XMMRegister dst, Register src);
289  void Cvtqui2sd(XMMRegister dst, Operand src);
290  void Cvttsd2uiq(Register dst, Operand src, Label* fail = nullptr);
291  void Cvttsd2uiq(Register dst, XMMRegister src, Label* fail = nullptr);
292  void Cvttss2uiq(Register dst, Operand src, Label* fail = nullptr);
293  void Cvttss2uiq(Register dst, XMMRegister src, Label* fail = nullptr);
294 
295  // The cvtsi2sd instruction only writes to the low 64 bits of the dst register, which
296  // hinders register renaming and makes dependence chains longer. So we use
297  // xorpd to clear the dst register before cvtsi2sd to solve this issue.
298  void Cvtlsi2sd(XMMRegister dst, Register src);
299  void Cvtlsi2sd(XMMRegister dst, Operand src);
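// Sketch of the emitted sequence (editor's illustration): Cvtlsi2sd(xmm0, rax)
// roughly expands to
//
//   xorpd xmm0, xmm0    // break the false dependence on the old xmm0 value
//   cvtsi2sd xmm0, eax  // convert the 32-bit integer
//
// (the corresponding AVX encodings are presumably used when AVX is available).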
300 
301  void Lzcntq(Register dst, Register src);
302  void Lzcntq(Register dst, Operand src);
303  void Lzcntl(Register dst, Register src);
304  void Lzcntl(Register dst, Operand src);
305  void Tzcntq(Register dst, Register src);
306  void Tzcntq(Register dst, Operand src);
307  void Tzcntl(Register dst, Register src);
308  void Tzcntl(Register dst, Operand src);
309  void Popcntl(Register dst, Register src);
310  void Popcntl(Register dst, Operand src);
311  void Popcntq(Register dst, Register src);
312  void Popcntq(Register dst, Operand src);
313 
314  // Is the value a tagged smi.
315  Condition CheckSmi(Register src);
316  Condition CheckSmi(Operand src);
317 
318  // Jump to label if the value is a tagged smi.
319  void JumpIfSmi(Register src, Label* on_smi,
320  Label::Distance near_jump = Label::kFar);
321 
322  void JumpIfEqual(Register a, int32_t b, Label* dest) {
323  cmpl(a, Immediate(b));
324  j(equal, dest);
325  }
326 
327  void JumpIfLessThan(Register a, int32_t b, Label* dest) {
328  cmpl(a, Immediate(b));
329  j(less, dest);
330  }
331 
332  void Move(Register dst, Smi source);
333 
334  void Move(Operand dst, Smi source) {
335  Register constant = GetSmiConstant(source);
336  movp(dst, constant);
337  }
338 
339  void Move(Register dst, ExternalReference ext);
340 
341  void Move(XMMRegister dst, uint32_t src);
342  void Move(XMMRegister dst, uint64_t src);
343  void Move(XMMRegister dst, float src) { Move(dst, bit_cast<uint32_t>(src)); }
344  void Move(XMMRegister dst, double src) { Move(dst, bit_cast<uint64_t>(src)); }
345 
346  // Move if the registers are not identical.
347  void Move(Register target, Register source);
348 
349  void Move(Register dst, Handle<HeapObject> source,
350  RelocInfo::Mode rmode = RelocInfo::EMBEDDED_OBJECT);
351  void Move(Operand dst, Handle<HeapObject> source,
352  RelocInfo::Mode rmode = RelocInfo::EMBEDDED_OBJECT);
353 
354  // Loads a pointer into a register with a relocation mode.
355  void Move(Register dst, Address ptr, RelocInfo::Mode rmode) {
356  // This method must not be used with heap object references. The stored
357  // address is not GC safe. Use the handle version instead.
358  DCHECK(rmode > RelocInfo::LAST_GCED_ENUM);
359  movp(dst, ptr, rmode);
360  }
361 
362  void MoveStringConstant(Register result, const StringConstantBase* string,
363  RelocInfo::Mode rmode = RelocInfo::EMBEDDED_OBJECT);
364 
365  // Convert smi to word-size sign-extended value.
366  void SmiUntag(Register dst, Register src);
367  void SmiUntag(Register dst, Operand src);
368 
369  // Loads the address of the external reference into the destination
370  // register.
371  void LoadAddress(Register destination, ExternalReference source);
372 
373  void LoadFromConstantsTable(Register destination,
374  int constant_index) override;
375  void LoadRootRegisterOffset(Register destination, intptr_t offset) override;
376  void LoadRootRelative(Register destination, int32_t offset) override;
377 
378  // Operand pointing to an external reference.
379  // May emit code to set up the scratch register. The operand is
380  // only guaranteed to be correct as long as the scratch register
381  // isn't changed.
382  // If the operand is used more than once, use a scratch register
383  // that is guaranteed not to be clobbered.
384  Operand ExternalReferenceAsOperand(ExternalReference reference,
385  Register scratch = kScratchRegister);
386 
387  void Call(Register reg) { call(reg); }
388  void Call(Operand op);
389  void Call(Handle<Code> code_object, RelocInfo::Mode rmode);
390  void Call(Address destination, RelocInfo::Mode rmode);
391  void Call(ExternalReference ext);
392  void Call(Label* target) { call(target); }
393 
394  void RetpolineCall(Register reg);
395  void RetpolineCall(Address destination, RelocInfo::Mode rmode);
396 
397  void Jump(Address destination, RelocInfo::Mode rmode);
398  void Jump(ExternalReference ext);
399  void Jump(Operand op);
400  void Jump(Handle<Code> code_object, RelocInfo::Mode rmode,
401  Condition cc = always);
402 
403  void RetpolineJump(Register reg);
404 
405  void CallForDeoptimization(Address target, int deopt_id,
406  RelocInfo::Mode rmode) {
407  USE(deopt_id);
408  call(target, rmode);
409  }
410 
411  // Non-SSE2 instructions.
412  void Pextrd(Register dst, XMMRegister src, int8_t imm8);
413  void Pinsrd(XMMRegister dst, Register src, int8_t imm8);
414  void Pinsrd(XMMRegister dst, Operand src, int8_t imm8);
415 
416  void CompareRoot(Register with, RootIndex index);
417  void CompareRoot(Operand with, RootIndex index);
418 
419  // Generates function and stub prologue code.
420  void StubPrologue(StackFrame::Type type);
421  void Prologue();
422 
423  // Calls Abort(msg) if the condition cc is not satisfied.
424  // Use --debug_code to enable.
425  void Assert(Condition cc, AbortReason reason);
426 
427  // Like Assert(), but without condition.
428  // Use --debug_code to enable.
429  void AssertUnreachable(AbortReason reason);
430 
431  // Abort execution if a 64 bit register containing a 32 bit payload does not
432  // have zeros in the top 32 bits, enabled via --debug-code.
433  void AssertZeroExtended(Register reg);
434 
435  // Like Assert(), but always enabled.
436  void Check(Condition cc, AbortReason reason);
437 
438  // Print a message to stdout and abort execution.
439  void Abort(AbortReason msg);
440 
441  // Check that the stack is aligned.
442  void CheckStackAlignment();
443 
444  // Activation support.
445  void EnterFrame(StackFrame::Type type);
446  void EnterFrame(StackFrame::Type type, bool load_constant_pool_pointer_reg) {
447  // Out-of-line constant pool not implemented on x64.
448  UNREACHABLE();
449  }
450  void LeaveFrame(StackFrame::Type type);
451 
452  // Removes current frame and its arguments from the stack preserving the
453  // arguments and a return address pushed to the stack for the next call. Both
454  // |callee_args_count| and |caller_args_count_reg| do not include receiver.
455  // |callee_args_count| is not modified, |caller_args_count_reg| is trashed.
456  void PrepareForTailCall(const ParameterCount& callee_args_count,
457  Register caller_args_count_reg, Register scratch0,
458  Register scratch1);
459 
460  inline bool AllowThisStubCall(CodeStub* stub);
461 
462  // Call a runtime routine. This expects {centry} to contain a fitting CEntry
463  // builtin for the target runtime function and uses an indirect call.
464  void CallRuntimeWithCEntry(Runtime::FunctionId fid, Register centry);
465 
466  void InitializeRootRegister() {
467  ExternalReference isolate_root = ExternalReference::isolate_root(isolate());
468  Move(kRootRegister, isolate_root);
469  }
470 
471  void SaveRegisters(RegList registers);
472  void RestoreRegisters(RegList registers);
473 
474  void CallRecordWriteStub(Register object, Register address,
475  RememberedSetAction remembered_set_action,
476  SaveFPRegsMode fp_mode);
477  void CallRecordWriteStub(Register object, Register address,
478  RememberedSetAction remembered_set_action,
479  SaveFPRegsMode fp_mode, Address wasm_target);
480 
481  void MoveNumber(Register dst, double value);
482  void MoveNonSmi(Register dst, double value);
483 
484  // Calculate how much stack space (in bytes) is required to store caller
485  // registers excluding those specified in the arguments.
486  int RequiredStackSizeForCallerSaved(SaveFPRegsMode fp_mode,
487  Register exclusion1 = no_reg,
488  Register exclusion2 = no_reg,
489  Register exclusion3 = no_reg) const;
490 
491  // PushCallerSaved and PopCallerSaved do not arrange the registers in any
492  // particular order so they are not useful for calls that can cause a GC.
493  // The caller can exclude up to 3 registers that do not need to be saved and
494  // restored.
495 
496  // Push caller saved registers on the stack, and return the number of bytes
497  // by which the stack pointer is adjusted.
498  int PushCallerSaved(SaveFPRegsMode fp_mode, Register exclusion1 = no_reg,
499  Register exclusion2 = no_reg,
500  Register exclusion3 = no_reg);
501  // Restore caller saved registers from the stack, and return the number of
502  // bytes by which the stack pointer is adjusted.
503  int PopCallerSaved(SaveFPRegsMode fp_mode, Register exclusion1 = no_reg,
504  Register exclusion2 = no_reg,
505  Register exclusion3 = no_reg);
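// Usage sketch (editor's illustration, not part of the original header):
// bracketing a call that clobbers caller-saved registers while keeping a live
// value in rax out of the save/restore set:
//
//   int bytes = masm->PushCallerSaved(kDontSaveFPRegs, rax);
//   // ... emit the call ...
//   bytes += masm->PopCallerSaved(kDontSaveFPRegs, rax);
//
// As noted above, the push order is unspecified, so this pattern is not
// suitable for calls that can trigger a GC.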
506 
507  // Compute the start of the generated instruction stream from the current PC.
508  // This is an alternative to embedding the {CodeObject} handle as a reference.
509  void ComputeCodeStartAddress(Register dst);
510 
511  void ResetSpeculationPoisonRegister();
512 
513  // ---------------------------------------------------------------------------
514  // Pointer compression support
515 
516  // TODO(ishell): remove |scratch_for_debug| once pointer compression works.
517  void DecompressTaggedSigned(Register destination, Operand field_operand,
518  Register scratch_for_debug);
519  void DecompressTaggedPointer(Register destination, Operand field_operand,
520  Register scratch_for_debug);
521  void DecompressAnyTagged(Register destination, Operand field_operand,
522  Register scratch, Register scratch_for_debug);
523 
524  protected:
525  static const int kSmiShift = kSmiTagSize + kSmiShiftSize;
526  int smi_count = 0;
527  int heap_object_count = 0;
528 
529  // Returns a register holding the smi value. The register MUST NOT be
530  // modified. It may be the "smi 1 constant" register.
531  Register GetSmiConstant(Smi value);
532 
533  void CallRecordWriteStub(Register object, Register address,
534  RememberedSetAction remembered_set_action,
535  SaveFPRegsMode fp_mode, Handle<Code> code_target,
536  Address wasm_target);
537 };
538 
539 // MacroAssembler implements a collection of frequently used macros.
540 class MacroAssembler : public TurboAssembler {
541  public:
542  MacroAssembler(const AssemblerOptions& options, void* buffer, int size)
543  : TurboAssembler(options, buffer, size) {}
544 
545  MacroAssembler(Isolate* isolate, void* buffer, int size,
546  CodeObjectRequired create_code_object)
547  : MacroAssembler(isolate, AssemblerOptions::Default(isolate), buffer,
548  size, create_code_object) {}
549 
550  MacroAssembler(Isolate* isolate, const AssemblerOptions& options,
551  void* buffer, int size, CodeObjectRequired create_code_object);
552 
553  // Loads and stores the value of an external reference.
554  // Special case code for load and store to take advantage of
555  // load_rax/store_rax if possible/necessary.
556  // For other operations, just use:
557  // Operand operand = ExternalReferenceAsOperand(extref);
558  // operation(operand, ..);
559  void Load(Register destination, ExternalReference source);
560  void Store(ExternalReference destination, Register source);
561 
562  // Pushes the address of the external reference onto the stack.
563  void PushAddress(ExternalReference source);
564 
565  // Operations on roots in the root-array.
566  // Load a root value where the index (or part of it) is variable.
567  // The variable_offset register is added to the fixed_offset value
568  // to get the index into the root-array.
569  void PushRoot(RootIndex index);
570 
571  // Compare the object in a register to a value and jump if they are equal.
572  void JumpIfRoot(Register with, RootIndex index, Label* if_equal,
573  Label::Distance if_equal_distance = Label::kFar) {
574  CompareRoot(with, index);
575  j(equal, if_equal, if_equal_distance);
576  }
577  void JumpIfRoot(Operand with, RootIndex index, Label* if_equal,
578  Label::Distance if_equal_distance = Label::kFar) {
579  CompareRoot(with, index);
580  j(equal, if_equal, if_equal_distance);
581  }
582 
583  // Compare the object in a register to a value and jump if they are not equal.
584  void JumpIfNotRoot(Register with, RootIndex index, Label* if_not_equal,
585  Label::Distance if_not_equal_distance = Label::kFar) {
586  CompareRoot(with, index);
587  j(not_equal, if_not_equal, if_not_equal_distance);
588  }
589  void JumpIfNotRoot(Operand with, RootIndex index, Label* if_not_equal,
590  Label::Distance if_not_equal_distance = Label::kFar) {
591  CompareRoot(with, index);
592  j(not_equal, if_not_equal, if_not_equal_distance);
593  }
594 
595  // ---------------------------------------------------------------------------
596  // GC Support
597 
598  // Notify the garbage collector that we wrote a pointer into an object.
599  // |object| is the object being stored into, |value| is the object being
600  // stored. value and scratch registers are clobbered by the operation.
601  // The offset is the offset from the start of the object, not the offset from
602  // the tagged HeapObject pointer. For use with FieldOperand(reg, off).
603  void RecordWriteField(
604  Register object, int offset, Register value, Register scratch,
605  SaveFPRegsMode save_fp,
606  RememberedSetAction remembered_set_action = EMIT_REMEMBERED_SET,
607  SmiCheck smi_check = INLINE_SMI_CHECK);
608 
609  // For the page containing |object|, mark the region covering |address|
610  // dirty. |object| is the object being stored into, |value| is the
611  // object being stored. The address and value registers are clobbered by the
612  // operation. RecordWrite filters out smis so it does not update
613  // the write barrier if the value is a smi.
614  void RecordWrite(
615  Register object, Register address, Register value, SaveFPRegsMode save_fp,
616  RememberedSetAction remembered_set_action = EMIT_REMEMBERED_SET,
617  SmiCheck smi_check = INLINE_SMI_CHECK);
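// Usage sketch (editor's illustration, not part of the original header): a
// field store followed by its write barrier, using FieldOperand from the end
// of this file. |object|, |value| and |scratch| stand for arbitrary registers
// and |offset| for the untagged field offset:
//
//   masm->movp(FieldOperand(object, offset), value);
//   masm->RecordWriteField(object, offset, value, scratch, kDontSaveFPRegs);
//
// With the default INLINE_SMI_CHECK, the barrier is skipped when |value| is
// a smi.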
618 
619  // Frame restart support.
620  void MaybeDropFrames();
621 
622  // Enter specific kind of exit frame; either in normal or
623  // debug mode. Expects the number of arguments in register rax and
624  // sets up the number of arguments in register rdi and the pointer
625  // to the first argument in register rsi.
626  //
627  // Allocates arg_stack_space * kPointerSize memory (not GCed) on the stack
628  // accessible via StackSpaceOperand.
629  void EnterExitFrame(int arg_stack_space = 0, bool save_doubles = false,
630  StackFrame::Type frame_type = StackFrame::EXIT);
631 
632  // Enter specific kind of exit frame. Allocates arg_stack_space * kPointerSize
633  // memory (not GCed) on the stack accessible via StackSpaceOperand.
634  void EnterApiExitFrame(int arg_stack_space);
635 
636  // Leave the current exit frame. Expects/provides the return value in
637  // register rax:rdx (untouched) and the pointer to the first
638  // argument in register rsi (if pop_arguments == true).
639  void LeaveExitFrame(bool save_doubles = false, bool pop_arguments = true);
640 
641  // Leave the current exit frame. Expects/provides the return value in
642  // register rax (untouched).
643  void LeaveApiExitFrame();
644 
645  // Push and pop the registers that can hold pointers.
646  void PushSafepointRegisters() { Pushad(); }
647  void PopSafepointRegisters() { Popad(); }
648 
649  // ---------------------------------------------------------------------------
650  // JavaScript invokes
651 
652  // Invoke the JavaScript function code by either calling or jumping.
653  void InvokeFunctionCode(Register function, Register new_target,
654  const ParameterCount& expected,
655  const ParameterCount& actual, InvokeFlag flag);
656 
657  // On function call, call into the debugger if necessary.
658  void CheckDebugHook(Register fun, Register new_target,
659  const ParameterCount& expected,
660  const ParameterCount& actual);
661 
662  // Invoke the JavaScript function in the given register. Changes the
663  // current context to the context in the function before invoking.
664  void InvokeFunction(Register function, Register new_target,
665  const ParameterCount& actual, InvokeFlag flag);
666 
667  void InvokeFunction(Register function, Register new_target,
668  const ParameterCount& expected,
669  const ParameterCount& actual, InvokeFlag flag);
670 
671  // ---------------------------------------------------------------------------
672  // Conversions between tagged smi values and non-tagged integer values.
673 
674  // Tag a word-size value. The result must be known to be a valid smi value.
675  void SmiTag(Register dst, Register src);
676 
677  // Simple comparison of smis. Both sides must be known smis to use these,
678  // otherwise use Cmp.
679  void SmiCompare(Register smi1, Register smi2);
680  void SmiCompare(Register dst, Smi src);
681  void SmiCompare(Register dst, Operand src);
682  void SmiCompare(Operand dst, Register src);
683  void SmiCompare(Operand dst, Smi src);
684 
685  // Functions performing a check on a known or potential smi. Returns
686  // a condition that is satisfied if the check is successful.
687 
688  // Test-and-jump functions. Typically combines a check function
689  // above with a conditional jump.
690 
691  // Jump to label if the value is not a tagged smi.
692  void JumpIfNotSmi(Register src,
693  Label* on_not_smi,
694  Label::Distance near_jump = Label::kFar);
695 
696  // Jump to label if the value is not a tagged smi.
697  void JumpIfNotSmi(Operand src, Label* on_not_smi,
698  Label::Distance near_jump = Label::kFar);
699 
700  // Operations on tagged smi values.
701 
702  // Smis represent a subset of integers. The subset is always equivalent to
703  // a two's complement interpretation of a fixed number of bits.
704 
705  // Add an integer constant to a tagged smi, giving a tagged smi as result.
706  // No overflow testing on the result is done.
707  void SmiAddConstant(Operand dst, Smi constant);
708 
709  // Specialized operations
710 
711  // Converts, if necessary, a smi to a combination of number and
712  // multiplier to be used as a scaled index.
713  // The src register contains a *positive* smi value. The shift is the
714  // power of two to multiply the index value by (e.g.
715  // to index by smi-value * kPointerSize, pass the smi and kPointerSizeLog2).
716  // The returned index register may be either src or dst, depending
717  // on what is most efficient. If src and dst are different registers,
718  // src is always unchanged.
719  SmiIndex SmiToIndex(Register dst, Register src, int shift);
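// Usage sketch (editor's illustration, not part of the original header):
// scaling a positive smi index for a pointer-sized element access, where rbx
// is assumed to hold the start of the element area:
//
//   SmiIndex index = masm->SmiToIndex(rcx, rcx, kPointerSizeLog2);
//   masm->movp(rax, Operand(rbx, index.reg, index.scale, 0));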
720 
721  // ---------------------------------------------------------------------------
722  // Macro instructions.
723 
724  // Load/store with specific representation.
725  void Load(Register dst, Operand src, Representation r);
726  void Store(Operand dst, Register src, Representation r);
727 
728  void Cmp(Register dst, Handle<Object> source);
729  void Cmp(Operand dst, Handle<Object> source);
730  void Cmp(Register dst, Smi src);
731  void Cmp(Operand dst, Smi src);
732 
733  // Emit code to discard a non-negative number of pointer-sized elements
734  // from the stack, clobbering only the rsp register.
735  void Drop(int stack_elements);
736  // Emit code to discard a positive number of pointer-sized elements
737  // from the stack under the return address which remains on the top,
738  // clobbering the rsp register.
739  void DropUnderReturnAddress(int stack_elements,
740  Register scratch = kScratchRegister);
741 
742  void PushQuad(Operand src);
743  void PushImm32(int32_t imm32);
744  void Pop(Register dst);
745  void Pop(Operand dst);
746  void PopQuad(Operand dst);
747 
748  // ---------------------------------------------------------------------------
749  // SIMD macros.
750  void Absps(XMMRegister dst);
751  void Negps(XMMRegister dst);
752  void Abspd(XMMRegister dst);
753  void Negpd(XMMRegister dst);
754  // Generates a trampoline to jump to the off-heap instruction stream.
755  void JumpToInstructionStream(Address entry);
756 
757  // Non-x64 instructions.
758  // Push/pop all general purpose registers.
759  // Does not push rsp/rbp nor any of the assembler's special purpose registers
760  // (kScratchRegister, kRootRegister).
761  void Pushad();
762  void Popad();
763 
764  // Compare object type for heap object.
765  // Always use unsigned comparisons: above and below, not less and greater.
766  // Incoming register is heap_object and outgoing register is map.
767  // They may be the same register, and may be kScratchRegister.
768  void CmpObjectType(Register heap_object, InstanceType type, Register map);
769 
770  // Compare instance type for map.
771  // Always use unsigned comparisons: above and below, not less and greater.
772  void CmpInstanceType(Register map, InstanceType type);
773 
774  void DoubleToI(Register result_reg, XMMRegister input_reg,
775  XMMRegister scratch, Label* lost_precision, Label* is_nan,
776  Label::Distance dst = Label::kFar);
777 
778  template<typename Field>
779  void DecodeField(Register reg) {
780  static const int shift = Field::kShift;
781  static const int mask = Field::kMask >> Field::kShift;
782  if (shift != 0) {
783  shrp(reg, Immediate(shift));
784  }
785  andp(reg, Immediate(mask));
786  }
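// Illustration (editor's note, not part of the original header): given a
// hypothetical bit field
//
//   class FooField : public BitField<int, 3, 4> {};  // shift 3, width 4
//
// DecodeField<FooField>(rax) emits shrp(rax, Immediate(3)) followed by
// andp(rax, Immediate(0xF)), leaving the decoded field value in rax.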
787 
788  // Abort execution if argument is a smi, enabled via --debug-code.
789  void AssertNotSmi(Register object);
790 
791  // Abort execution if argument is not a smi, enabled via --debug-code.
792  void AssertSmi(Register object);
793  void AssertSmi(Operand object);
794 
795  // Abort execution if argument is not a Constructor, enabled via --debug-code.
796  void AssertConstructor(Register object);
797 
798  // Abort execution if argument is not a JSFunction, enabled via --debug-code.
799  void AssertFunction(Register object);
800 
801  // Abort execution if argument is not a JSBoundFunction,
802  // enabled via --debug-code.
803  void AssertBoundFunction(Register object);
804 
805  // Abort execution if argument is not a JSGeneratorObject (or subclass),
806  // enabled via --debug-code.
807  void AssertGeneratorObject(Register object);
808 
809  // Abort execution if argument is not undefined or an AllocationSite, enabled
810  // via --debug-code.
811  void AssertUndefinedOrAllocationSite(Register object);
812 
813  // ---------------------------------------------------------------------------
814  // Exception handling
815 
816  // Push a new stack handler and link it into stack handler chain.
817  void PushStackHandler();
818 
819  // Unlink the stack handler on top of the stack from the stack handler chain.
820  void PopStackHandler();
821 
822  // ---------------------------------------------------------------------------
823  // Support functions.
824 
825  // Load the global proxy from the current context.
826  void LoadGlobalProxy(Register dst) {
827  LoadNativeContextSlot(Context::GLOBAL_PROXY_INDEX, dst);
828  }
829 
830  // Load the native context slot with the current index.
831  void LoadNativeContextSlot(int index, Register dst);
832 
833  // ---------------------------------------------------------------------------
834  // Runtime calls
835 
836  // Call a code stub.
837  // The code object is generated immediately, in contrast to
838  // TurboAssembler::CallStubDelayed.
839  void CallStub(CodeStub* stub);
840 
841  // Tail call a code stub (jump).
842  void TailCallStub(CodeStub* stub);
843 
844  // Call a runtime routine.
845  void CallRuntime(const Runtime::Function* f,
846  int num_arguments,
847  SaveFPRegsMode save_doubles = kDontSaveFPRegs);
848 
849  // Convenience function: Same as above, but takes the fid instead.
850  void CallRuntime(Runtime::FunctionId fid,
851  SaveFPRegsMode save_doubles = kDontSaveFPRegs) {
852  const Runtime::Function* function = Runtime::FunctionForId(fid);
853  CallRuntime(function, function->nargs, save_doubles);
854  }
855 
856  // Convenience function: Same as above, but takes the fid instead.
857  void CallRuntime(Runtime::FunctionId fid, int num_arguments,
858  SaveFPRegsMode save_doubles = kDontSaveFPRegs) {
859  CallRuntime(Runtime::FunctionForId(fid), num_arguments, save_doubles);
860  }
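// Usage sketch (editor's illustration, not part of the original header):
// calling a runtime function by id, where Runtime::kFooBar is a hypothetical
// FunctionId; the argument count is taken from the function's descriptor:
//
//   masm->CallRuntime(Runtime::kFooBar);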
861 
862  // Convenience function: tail call a runtime routine (jump)
863  void TailCallRuntime(Runtime::FunctionId fid);
864 
865  // Jump to a runtime routine.
866  void JumpToExternalReference(const ExternalReference& ext,
867  bool builtin_exit_frame = false);
868 
869  // ---------------------------------------------------------------------------
870  // StatsCounter support
871  void IncrementCounter(StatsCounter* counter, int value);
872  void DecrementCounter(StatsCounter* counter, int value);
873 
874  // ---------------------------------------------------------------------------
875  // In-place weak references.
876  void LoadWeakValue(Register in_out, Label* target_if_cleared);
877 
878  // ---------------------------------------------------------------------------
879  // Debugging
880 
881  static int SafepointRegisterStackIndex(Register reg) {
882  return SafepointRegisterStackIndex(reg.code());
883  }
884 
885  private:
886  // Order in which general registers are pushed by Pushad:
887  // rax, rcx, rdx, rbx, rsi, rdi, r8, r9, r11, r12, r14, r15.
888  static const int kSafepointPushRegisterIndices[Register::kNumRegisters];
889  static const int kNumSafepointSavedRegisters = 12;
890 
891  // Helper functions for generating invokes.
892  void InvokePrologue(const ParameterCount& expected,
893  const ParameterCount& actual, Label* done,
894  bool* definitely_mismatches, InvokeFlag flag,
895  Label::Distance near_jump);
896 
897  void EnterExitFramePrologue(bool save_rax, StackFrame::Type frame_type);
898 
899  // Allocates arg_stack_space * kPointerSize memory (not GCed) on the stack
900  // accessible via StackSpaceOperand.
901  void EnterExitFrameEpilogue(int arg_stack_space, bool save_doubles);
902 
903  void LeaveExitFrameEpilogue();
904 
905  // Helper for implementing JumpIfNotInNewSpace and JumpIfInNewSpace.
906  void InNewSpace(Register object,
907  Register scratch,
908  Condition cc,
909  Label* branch,
910  Label::Distance distance = Label::kFar);
911 
912  // Compute memory operands for safepoint stack slots.
913  static int SafepointRegisterStackIndex(int reg_code) {
914  return kNumSafepointRegisters - kSafepointPushRegisterIndices[reg_code] - 1;
915  }
916 
917  // Needs access to SafepointRegisterStackIndex for compiled frame
918  // traversal.
919  friend class StandardFrame;
920 };
921 
922 // -----------------------------------------------------------------------------
923 // Static helper functions.
924 
925 // Generate an Operand for loading a field from an object.
926 inline Operand FieldOperand(Register object, int offset) {
927  return Operand(object, offset - kHeapObjectTag);
928 }
929 
930 
931 // Generate an Operand for loading an indexed field from an object.
932 inline Operand FieldOperand(Register object,
933  Register index,
934  ScaleFactor scale,
935  int offset) {
936  return Operand(object, index, scale, offset - kHeapObjectTag);
937 }
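// Note (editor's illustration, not part of the original header): heap object
// pointers are tagged, so FieldOperand folds the -kHeapObjectTag adjustment
// into the displacement. With a hypothetical field offset kFooOffset,
//
//   masm->movp(rbx, FieldOperand(rax, kFooOffset));
//
// loads from rax + kFooOffset - kHeapObjectTag.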
938 
939 
940 inline Operand ContextOperand(Register context, int index) {
941  return Operand(context, Context::SlotOffset(index));
942 }
943 
944 
945 inline Operand ContextOperand(Register context, Register index) {
946  return Operand(context, index, times_pointer_size, Context::SlotOffset(0));
947 }
948 
949 
950 inline Operand NativeContextOperand() {
951  return ContextOperand(rsi, Context::NATIVE_CONTEXT_INDEX);
952 }
953 
954 
955 // Provides access to exit frame stack space (not GCed).
956 inline Operand StackSpaceOperand(int index) {
957 #ifdef _WIN64
958  const int kShadowSpace = 4;
959  return Operand(rsp, (index + kShadowSpace) * kPointerSize);
960 #else
961  return Operand(rsp, index * kPointerSize);
962 #endif
963 }
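// Usage sketch (editor's illustration, not part of the original header):
// after EnterApiExitFrame(2), the two reserved (non-GCed) slots can be
// addressed as
//
//   masm->movp(StackSpaceOperand(0), rax);   // first reserved slot
//   masm->movp(StackSpaceOperand(1), rbx);   // second reserved slot
//
// On Windows the operands skip the four shadow-space slots automatically.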
964 
965 
966 inline Operand StackOperandForReturnAddress(int32_t disp) {
967  return Operand(rsp, disp);
968 }
969 
970 #define ACCESS_MASM(masm) masm->
971 
972 } // namespace internal
973 } // namespace v8
974 
975 #endif // V8_X64_MACRO_ASSEMBLER_X64_H_