V8 API Reference, 7.2.502.16 (for Deno 0.2.4)
macro-assembler-ia32.h
1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
4 
5 #ifndef INCLUDED_FROM_MACRO_ASSEMBLER_H
6 #error This header must be included via macro-assembler.h
7 #endif
8 
9 #ifndef V8_IA32_MACRO_ASSEMBLER_IA32_H_
10 #define V8_IA32_MACRO_ASSEMBLER_IA32_H_
11 
12 #include "src/assembler.h"
13 #include "src/bailout-reason.h"
14 #include "src/globals.h"
15 #include "src/ia32/assembler-ia32.h"
16 
17 namespace v8 {
18 namespace internal {
19 
20 // Give alias names to registers for calling conventions.
21 constexpr Register kReturnRegister0 = eax;
22 constexpr Register kReturnRegister1 = edx;
23 constexpr Register kReturnRegister2 = edi;
24 constexpr Register kJSFunctionRegister = edi;
25 constexpr Register kContextRegister = esi;
26 constexpr Register kAllocateSizeRegister = edx;
27 constexpr Register kInterpreterAccumulatorRegister = eax;
28 constexpr Register kInterpreterBytecodeOffsetRegister = edx;
29 constexpr Register kInterpreterBytecodeArrayRegister = edi;
30 constexpr Register kInterpreterDispatchTableRegister = esi;
31 
32 constexpr Register kJavaScriptCallArgCountRegister = eax;
33 constexpr Register kJavaScriptCallCodeStartRegister = ecx;
34 constexpr Register kJavaScriptCallTargetRegister = kJSFunctionRegister;
35 constexpr Register kJavaScriptCallNewTargetRegister = edx;
36 
37 // The ExtraArg1Register is not part of the real JS calling convention and is
38 // mostly there to simplify consistent interface descriptor definitions across
39 // platforms. Note that on ia32 it aliases kJavaScriptCallCodeStartRegister.
40 constexpr Register kJavaScriptCallExtraArg1Register = ecx;
41 
42 // The off-heap trampoline does not need a register on ia32 (it uses a
43 // pc-relative call instead).
44 constexpr Register kOffHeapTrampolineRegister = no_reg;
45 
46 constexpr Register kRuntimeCallFunctionRegister = edx;
47 constexpr Register kRuntimeCallArgCountRegister = eax;
48 constexpr Register kRuntimeCallArgvRegister = ecx;
49 constexpr Register kWasmInstanceRegister = esi;
50 constexpr Register kWasmCompileLazyFuncIndexRegister = edi;
51 
52 constexpr Register kRootRegister = ebx;
53 
54 // TODO(860429): Remove remaining poisoning infrastructure on ia32.
55 constexpr Register kSpeculationPoisonRegister = no_reg;
56 
57 // Convenience for platform-independent signatures. We do not normally
58 // distinguish memory operands from other operands on ia32.
59 typedef Operand MemOperand;
60 
61 enum RememberedSetAction { EMIT_REMEMBERED_SET, OMIT_REMEMBERED_SET };
62 enum SmiCheck { INLINE_SMI_CHECK, OMIT_SMI_CHECK };
63 
64 class V8_EXPORT_PRIVATE TurboAssembler : public TurboAssemblerBase {
65  public:
66  TurboAssembler(const AssemblerOptions& options, void* buffer, int buffer_size)
67  : TurboAssemblerBase(options, buffer, buffer_size) {}
68 
69  TurboAssembler(Isolate* isolate, const AssemblerOptions& options,
70  void* buffer, int buffer_size,
71  CodeObjectRequired create_code_object)
72  : TurboAssemblerBase(isolate, options, buffer, buffer_size,
73  create_code_object) {}
74 
75  void CheckPageFlag(Register object, Register scratch, int mask, Condition cc,
76  Label* condition_met,
77  Label::Distance condition_met_distance = Label::kFar);
78 
79  // Activation support.
80  void EnterFrame(StackFrame::Type type);
81  void EnterFrame(StackFrame::Type type, bool load_constant_pool_pointer_reg) {
82  // Out-of-line constant pool not implemented on ia32.
83  UNREACHABLE();
84  }
85  void LeaveFrame(StackFrame::Type type);
86 
87 // Allocate a stack frame of given size (i.e. decrement {esp} by the value
88 // stored in the given register).
89 #ifdef V8_OS_WIN
90  // On win32, take special care if the number of bytes is greater than 4096:
91  // Ensure that each page within the new stack frame is touched once in
92  // decreasing order. See
93  // https://msdn.microsoft.com/en-us/library/aa227153(v=vs.60).aspx.
94  // Use {bytes_scratch} as scratch register for this procedure.
95  void AllocateStackFrame(Register bytes_scratch);
96 #else
97  void AllocateStackFrame(Register bytes) { sub(esp, bytes); }
98 #endif
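 // Usage sketch (illustrative only; assumes a TurboAssembler* named masm and an
 // arbitrary 8 KiB frame size, neither of which comes from this header):
 //   masm->mov(ecx, Immediate(0x2000));  // byte count; ecx is clobbered on win32
 //   masm->AllocateStackFrame(ecx);      // decrements esp, touching pages on win32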
99 
100  // Print a message to stdout and abort execution.
101  void Abort(AbortReason reason);
102 
103  // Calls Abort(msg) if the condition cc is not satisfied.
104  // Use --debug_code to enable.
105  void Assert(Condition cc, AbortReason reason);
106 
107  // Like Assert(), but without condition.
108  // Use --debug_code to enable.
109  void AssertUnreachable(AbortReason reason);
110 
111  // Like Assert(), but always enabled.
112  void Check(Condition cc, AbortReason reason);
113 
114  // Check that the stack is aligned.
115  void CheckStackAlignment();
116 
117  // Move a constant into a destination using the most efficient encoding.
118  void Move(Register dst, const Immediate& src);
119  void Move(Register dst, Smi src) { Move(dst, Immediate(src)); }
120  void Move(Register dst, Handle<HeapObject> src);
121  void Move(Register dst, Register src);
122  void Move(Operand dst, const Immediate& src);
123 
124  // Move an immediate into an XMM register.
125  void Move(XMMRegister dst, uint32_t src);
126  void Move(XMMRegister dst, uint64_t src);
127  void Move(XMMRegister dst, float src) { Move(dst, bit_cast<uint32_t>(src)); }
128  void Move(XMMRegister dst, double src) { Move(dst, bit_cast<uint64_t>(src)); }
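 // Example (illustrative; masm is an assumed TurboAssembler*): the float/double
 // overloads above simply reinterpret the bits, so
 //   masm->Move(xmm0, 1.5f);  // same as Move(xmm0, bit_cast<uint32_t>(1.5f))
 //   masm->Move(xmm1, 0.0);   // same as Move(xmm1, uint64_t{0})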
129 
130  void Call(Register reg) { call(reg); }
131  void Call(Label* target) { call(target); }
132  void Call(Handle<Code> code_object, RelocInfo::Mode rmode);
133 
134  void Jump(Handle<Code> code_object, RelocInfo::Mode rmode);
135 
136  void RetpolineCall(Register reg);
137  void RetpolineCall(Address destination, RelocInfo::Mode rmode);
138 
139  void RetpolineJump(Register reg);
140 
141  void CallForDeoptimization(Address target, int deopt_id,
142  RelocInfo::Mode rmode) {
143  USE(deopt_id);
144  call(target, rmode);
145  }
146 
147  inline bool AllowThisStubCall(CodeStub* stub);
148 
149  // Call a runtime routine. This expects {centry} to contain a fitting CEntry
150  // builtin for the target runtime function and uses an indirect call.
151  void CallRuntimeWithCEntry(Runtime::FunctionId fid, Register centry);
152 
153  // Jump if the register contains a smi.
154  inline void JumpIfSmi(Register value, Label* smi_label,
155  Label::Distance distance = Label::kFar) {
156  test(value, Immediate(kSmiTagMask));
157  j(zero, smi_label, distance);
158  }
159  // Jump if the operand is a smi.
160  inline void JumpIfSmi(Operand value, Label* smi_label,
161  Label::Distance distance = Label::kFar) {
162  test(value, Immediate(kSmiTagMask));
163  j(zero, smi_label, distance);
164  }
165 
166  void JumpIfEqual(Register a, int32_t b, Label* dest) {
167  cmp(a, Immediate(b));
168  j(equal, dest);
169  }
170 
171  void JumpIfLessThan(Register a, int32_t b, Label* dest) {
172  cmp(a, Immediate(b));
173  j(less, dest);
174  }
175 
176  void SmiUntag(Register reg) { sar(reg, kSmiTagSize); }
177 
178  // Removes the current frame and its arguments from the stack, preserving the
179  // arguments and a return address pushed to the stack for the next call. Neither
180  // |callee_args_count| nor |caller_args_count_reg| includes the receiver.
181  // |callee_args_count| is not modified; |caller_args_count_reg| is trashed.
182  // |number_of_temp_values_after_return_address| specifies the number of words
183  // pushed to the stack after the return address. This is to allow "allocation"
184  // of scratch registers that this function requires by saving their values on
185  // the stack.
186  void PrepareForTailCall(const ParameterCount& callee_args_count,
187  Register caller_args_count_reg, Register scratch0,
188  Register scratch1,
189  int number_of_temp_values_after_return_address);
190 
191  // Before calling a C-function from generated code, align arguments on stack.
192  // After aligning the frame, arguments must be stored in esp[0], esp[4],
193  // etc., not pushed. The argument count assumes all arguments are word sized.
194  // Some compilers/platforms require the stack to be aligned when calling
195  // C++ code.
196  // Needs a scratch register to do some arithmetic. This register will be
197  // trashed.
198  void PrepareCallCFunction(int num_arguments, Register scratch);
199 
200  // Calls a C function and cleans up the space for arguments allocated
201  // by PrepareCallCFunction. The called function is not allowed to trigger a
202  // garbage collection, since that might move the code and invalidate the
203  // return address (unless this is somehow accounted for by the called
204  // function).
205  void CallCFunction(ExternalReference function, int num_arguments);
206  void CallCFunction(Register function, int num_arguments);
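 // Usage sketch of the C call protocol described above (illustrative; masm and
 // the ExternalReference ref, assumed to name a C function taking two word-sized
 // arguments, are assumptions rather than part of this header):
 //   masm->PrepareCallCFunction(2, eax);               // eax is used as scratch
 //   masm->mov(Operand(esp, 0 * kPointerSize), Immediate(1));
 //   masm->mov(Operand(esp, 1 * kPointerSize), Immediate(2));
 //   masm->CallCFunction(ref, 2);                      // frees the argument space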
207 
208  void ShlPair(Register high, Register low, uint8_t imm8);
209  void ShlPair_cl(Register high, Register low);
210  void ShrPair(Register high, Register low, uint8_t imm8);
211  void ShrPair_cl(Register high, Register low);
212  void SarPair(Register high, Register low, uint8_t imm8);
213  void SarPair_cl(Register high, Register low);
214 
215  // Generates function and stub prologue code.
216  void StubPrologue(StackFrame::Type type);
217  void Prologue();
218 
219  void Lzcnt(Register dst, Register src) { Lzcnt(dst, Operand(src)); }
220  void Lzcnt(Register dst, Operand src);
221 
222  void Tzcnt(Register dst, Register src) { Tzcnt(dst, Operand(src)); }
223  void Tzcnt(Register dst, Operand src);
224 
225  void Popcnt(Register dst, Register src) { Popcnt(dst, Operand(src)); }
226  void Popcnt(Register dst, Operand src);
227 
228  void Ret();
229 
230  // Root register utility functions.
231 
232  void InitializeRootRegister();
233 
234  void LoadRoot(Register destination, RootIndex index) override;
235 
236  // Indirect root-relative loads.
237  void LoadFromConstantsTable(Register destination,
238  int constant_index) override;
239  void LoadRootRegisterOffset(Register destination, intptr_t offset) override;
240  void LoadRootRelative(Register destination, int32_t offset) override;
241 
242  // Operand pointing to an external reference.
243  // May emit code to set up the scratch register. The operand is
244  // only guaranteed to be correct as long as the scratch register
245  // isn't changed.
246  // If the operand is used more than once, use a scratch register
247  // that is guaranteed not to be clobbered.
248  Operand ExternalReferenceAsOperand(ExternalReference reference,
249  Register scratch);
250  Operand ExternalReferenceAddressAsOperand(ExternalReference reference);
251  Operand HeapObjectAsOperand(Handle<HeapObject> object);
252 
253  void LoadAddress(Register destination, ExternalReference source);
254 
255  void CompareStackLimit(Register with);
256  void CompareRealStackLimit(Register with);
257  void CompareRoot(Register with, RootIndex index);
258  void CompareRoot(Register with, Register scratch, RootIndex index);
259 
260  // Return and drop arguments from stack, where the number of arguments
261  // may be bigger than 2^16 - 1. Requires a scratch register.
262  void Ret(int bytes_dropped, Register scratch);
263 
264  void Pshufhw(XMMRegister dst, XMMRegister src, uint8_t shuffle) {
265  Pshufhw(dst, Operand(src), shuffle);
266  }
267  void Pshufhw(XMMRegister dst, Operand src, uint8_t shuffle);
268  void Pshuflw(XMMRegister dst, XMMRegister src, uint8_t shuffle) {
269  Pshuflw(dst, Operand(src), shuffle);
270  }
271  void Pshuflw(XMMRegister dst, Operand src, uint8_t shuffle);
272  void Pshufd(XMMRegister dst, XMMRegister src, uint8_t shuffle) {
273  Pshufd(dst, Operand(src), shuffle);
274  }
275  void Pshufd(XMMRegister dst, Operand src, uint8_t shuffle);
276  void Psraw(XMMRegister dst, uint8_t shift);
277  void Psrlw(XMMRegister dst, uint8_t shift);
278 
279 // SSE/SSE2 instructions with AVX version.
280 #define AVX_OP2_WITH_TYPE(macro_name, name, dst_type, src_type) \
281  void macro_name(dst_type dst, src_type src) { \
282  if (CpuFeatures::IsSupported(AVX)) { \
283  CpuFeatureScope scope(this, AVX); \
284  v##name(dst, src); \
285  } else { \
286  name(dst, src); \
287  } \
288  }
289 
290  AVX_OP2_WITH_TYPE(Rcpps, rcpps, XMMRegister, const Operand&)
291  AVX_OP2_WITH_TYPE(Rsqrtps, rsqrtps, XMMRegister, const Operand&)
292  AVX_OP2_WITH_TYPE(Movdqu, movdqu, XMMRegister, Operand)
293  AVX_OP2_WITH_TYPE(Movdqu, movdqu, Operand, XMMRegister)
294  AVX_OP2_WITH_TYPE(Movd, movd, XMMRegister, Register)
295  AVX_OP2_WITH_TYPE(Movd, movd, XMMRegister, Operand)
296  AVX_OP2_WITH_TYPE(Movd, movd, Register, XMMRegister)
297  AVX_OP2_WITH_TYPE(Movd, movd, Operand, XMMRegister)
298  AVX_OP2_WITH_TYPE(Cvtdq2ps, cvtdq2ps, XMMRegister, Operand)
299 
300 #undef AVX_OP2_WITH_TYPE
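// For example, the Movdqu entry above expands to (roughly):
//   void Movdqu(XMMRegister dst, Operand src) {
//     if (CpuFeatures::IsSupported(AVX)) vmovdqu(dst, src);  // AVX encoding
//     else movdqu(dst, src);                                 // SSE2 encoding
//   }
// (The CpuFeatureScope bookkeeping is omitted from this sketch.)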
301 
302 // Only use these macros when the non-destructive source operand of the AVX
303 // version is not needed.
304 #define AVX_OP3_WITH_TYPE(macro_name, name, dst_type, src_type) \
305  void macro_name(dst_type dst, src_type src) { \
306  if (CpuFeatures::IsSupported(AVX)) { \
307  CpuFeatureScope scope(this, AVX); \
308  v##name(dst, dst, src); \
309  } else { \
310  name(dst, src); \
311  } \
312  }
313 #define AVX_OP3_XO(macro_name, name) \
314  AVX_OP3_WITH_TYPE(macro_name, name, XMMRegister, XMMRegister) \
315  AVX_OP3_WITH_TYPE(macro_name, name, XMMRegister, Operand)
316 
317  AVX_OP3_XO(Packsswb, packsswb)
318  AVX_OP3_XO(Packuswb, packuswb)
319  AVX_OP3_XO(Pcmpeqb, pcmpeqb)
320  AVX_OP3_XO(Pcmpeqw, pcmpeqw)
321  AVX_OP3_XO(Pcmpeqd, pcmpeqd)
322  AVX_OP3_XO(Psubb, psubb)
323  AVX_OP3_XO(Psubw, psubw)
324  AVX_OP3_XO(Psubd, psubd)
325  AVX_OP3_XO(Punpcklbw, punpcklbw)
326  AVX_OP3_XO(Punpckhbw, punpckhbw)
327  AVX_OP3_XO(Pxor, pxor)
328  AVX_OP3_XO(Andps, andps)
329  AVX_OP3_XO(Andpd, andpd)
330  AVX_OP3_XO(Xorps, xorps)
331  AVX_OP3_XO(Xorpd, xorpd)
332  AVX_OP3_XO(Sqrtss, sqrtss)
333  AVX_OP3_XO(Sqrtsd, sqrtsd)
334 
335 #undef AVX_OP3_XO
336 #undef AVX_OP3_WITH_TYPE
337 
338 // Non-SSE2 instructions.
339 #define AVX_OP2_WITH_TYPE_SCOPE(macro_name, name, dst_type, src_type, \
340  sse_scope) \
341  void macro_name(dst_type dst, src_type src) { \
342  if (CpuFeatures::IsSupported(AVX)) { \
343  CpuFeatureScope scope(this, AVX); \
344  v##name(dst, src); \
345  return; \
346  } \
347  if (CpuFeatures::IsSupported(sse_scope)) { \
348  CpuFeatureScope scope(this, sse_scope); \
349  name(dst, src); \
350  return; \
351  } \
352  UNREACHABLE(); \
353  }
354 #define AVX_OP2_XO_SSE4(macro_name, name) \
355  AVX_OP2_WITH_TYPE_SCOPE(macro_name, name, XMMRegister, XMMRegister, SSE4_1) \
356  AVX_OP2_WITH_TYPE_SCOPE(macro_name, name, XMMRegister, Operand, SSE4_1)
357 
358  AVX_OP2_XO_SSE4(Ptest, ptest)
359  AVX_OP2_XO_SSE4(Pmovsxbw, pmovsxbw)
360  AVX_OP2_XO_SSE4(Pmovsxwd, pmovsxwd)
361  AVX_OP2_XO_SSE4(Pmovzxbw, pmovzxbw)
362  AVX_OP2_XO_SSE4(Pmovzxwd, pmovzxwd)
363 
364 #undef AVX_OP2_WITH_TYPE_SCOPE
365 #undef AVX_OP2_XO_SSE4
366 
367  void Pshufb(XMMRegister dst, XMMRegister src) { Pshufb(dst, Operand(src)); }
368  void Pshufb(XMMRegister dst, Operand src);
369  void Pblendw(XMMRegister dst, XMMRegister src, uint8_t imm8) {
370  Pblendw(dst, Operand(src), imm8);
371  }
372  void Pblendw(XMMRegister dst, Operand src, uint8_t imm8);
373 
374  void Psignb(XMMRegister dst, XMMRegister src) { Psignb(dst, Operand(src)); }
375  void Psignb(XMMRegister dst, Operand src);
376  void Psignw(XMMRegister dst, XMMRegister src) { Psignw(dst, Operand(src)); }
377  void Psignw(XMMRegister dst, Operand src);
378  void Psignd(XMMRegister dst, XMMRegister src) { Psignd(dst, Operand(src)); }
379  void Psignd(XMMRegister dst, Operand src);
380 
381  void Palignr(XMMRegister dst, XMMRegister src, uint8_t imm8) {
382  Palignr(dst, Operand(src), imm8);
383  }
384  void Palignr(XMMRegister dst, Operand src, uint8_t imm8);
385 
386  void Pextrb(Register dst, XMMRegister src, uint8_t imm8);
387  void Pextrw(Register dst, XMMRegister src, uint8_t imm8);
388  void Pextrd(Register dst, XMMRegister src, uint8_t imm8);
389  void Pinsrd(XMMRegister dst, Register src, uint8_t imm8) {
390  Pinsrd(dst, Operand(src), imm8);
391  }
392  void Pinsrd(XMMRegister dst, Operand src, uint8_t imm8);
393 
394  // Expression support
395  // The cvtsi2sd instruction only writes to the low 64 bits of the dst register,
396  // which hinders register renaming and makes dependence chains longer. So we use
397  // xorps to clear the dst register before cvtsi2sd to solve this issue.
398  void Cvtsi2ss(XMMRegister dst, Register src) { Cvtsi2ss(dst, Operand(src)); }
399  void Cvtsi2ss(XMMRegister dst, Operand src);
400  void Cvtsi2sd(XMMRegister dst, Register src) { Cvtsi2sd(dst, Operand(src)); }
401  void Cvtsi2sd(XMMRegister dst, Operand src);
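 // Per the note above, Cvtsi2sd(xmm0, eax) emits roughly:
 //   xorps xmm0, xmm0    ; break the false dependence on the previous xmm0 value
 //   cvtsi2sd xmm0, eax  ; then convert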
402 
403  void Cvtui2ss(XMMRegister dst, Register src, Register tmp) {
404  Cvtui2ss(dst, Operand(src), tmp);
405  }
406  void Cvtui2ss(XMMRegister dst, Operand src, Register tmp);
407  void Cvttss2ui(Register dst, XMMRegister src, XMMRegister tmp) {
408  Cvttss2ui(dst, Operand(src), tmp);
409  }
410  void Cvttss2ui(Register dst, Operand src, XMMRegister tmp);
411  void Cvtui2sd(XMMRegister dst, Register src, Register scratch) {
412  Cvtui2sd(dst, Operand(src), scratch);
413  }
414  void Cvtui2sd(XMMRegister dst, Operand src, Register scratch);
415  void Cvttsd2ui(Register dst, XMMRegister src, XMMRegister tmp) {
416  Cvttsd2ui(dst, Operand(src), tmp);
417  }
418  void Cvttsd2ui(Register dst, Operand src, XMMRegister tmp);
419 
420  void Push(Register src) { push(src); }
421  void Push(Operand src) { push(src); }
422  void Push(Immediate value);
423  void Push(Handle<HeapObject> handle) { push(Immediate(handle)); }
424  void Push(Smi smi) { Push(Immediate(smi)); }
425 
426  void SaveRegisters(RegList registers);
427  void RestoreRegisters(RegList registers);
428 
429  void CallRecordWriteStub(Register object, Register address,
430  RememberedSetAction remembered_set_action,
431  SaveFPRegsMode fp_mode);
432  void CallRecordWriteStub(Register object, Register address,
433  RememberedSetAction remembered_set_action,
434  SaveFPRegsMode fp_mode, Address wasm_target);
435 
436  // Calculate how much stack space (in bytes) is required to store caller-saved
437  // registers, excluding those specified in the arguments.
438  int RequiredStackSizeForCallerSaved(SaveFPRegsMode fp_mode,
439  Register exclusion1 = no_reg,
440  Register exclusion2 = no_reg,
441  Register exclusion3 = no_reg) const;
442 
443  // PushCallerSaved and PopCallerSaved do not arrange the registers in any
444  // particular order so they are not useful for calls that can cause a GC.
445  // The caller can exclude up to 3 registers that do not need to be saved and
446  // restored.
447 
448  // Push caller-saved registers on the stack, and return the number of bytes by
449  // which the stack pointer is adjusted.
450  int PushCallerSaved(SaveFPRegsMode fp_mode, Register exclusion1 = no_reg,
451  Register exclusion2 = no_reg,
452  Register exclusion3 = no_reg);
453  // Restore caller-saved registers from the stack, and return the number of
454  // bytes by which the stack pointer is adjusted.
455  int PopCallerSaved(SaveFPRegsMode fp_mode, Register exclusion1 = no_reg,
456  Register exclusion2 = no_reg,
457  Register exclusion3 = no_reg);
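 // Usage sketch (illustrative; masm is an assumed TurboAssembler*): preserve the
 // caller-saved registers around a call whose only live result is eax, which is
 // therefore excluded from the save/restore:
 //   masm->PushCallerSaved(kDontSaveFPRegs, eax);
 //   // ... emit the call ...
 //   masm->PopCallerSaved(kDontSaveFPRegs, eax);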
458 
459  // Compute the start of the generated instruction stream from the current PC.
460  // This is an alternative to embedding the {CodeObject} handle as a reference.
461  void ComputeCodeStartAddress(Register dst);
462 
463  // TODO(860429): Remove remaining poisoning infrastructure on ia32.
464  void ResetSpeculationPoisonRegister() { UNREACHABLE(); }
465 
466  void CallRecordWriteStub(Register object, Register address,
467  RememberedSetAction remembered_set_action,
468  SaveFPRegsMode fp_mode, Handle<Code> code_target,
469  Address wasm_target);
470 };
471 
472 // MacroAssembler implements a collection of frequently used macros.
473 class MacroAssembler : public TurboAssembler {
474  public:
475  MacroAssembler(const AssemblerOptions& options, void* buffer, int size)
476  : TurboAssembler(options, buffer, size) {}
477 
478  MacroAssembler(Isolate* isolate, void* buffer, int size,
479  CodeObjectRequired create_code_object)
480  : MacroAssembler(isolate, AssemblerOptions::Default(isolate), buffer,
481  size, create_code_object) {}
482 
483  MacroAssembler(Isolate* isolate, const AssemblerOptions& options,
484  void* buffer, int size, CodeObjectRequired create_code_object);
485 
486  // Load a register with a long value as efficiently as possible.
487  void Set(Register dst, int32_t x) {
488  if (x == 0) {
489  xor_(dst, dst);
490  } else {
491  mov(dst, Immediate(x));
492  }
493  }
494  void Set(Operand dst, int32_t x) { mov(dst, Immediate(x)); }
495 
496  void PushRoot(RootIndex index);
497 
498  // Compare the object in a register to a value and jump if they are equal.
499  void JumpIfRoot(Register with, RootIndex index, Label* if_equal,
500  Label::Distance if_equal_distance = Label::kFar) {
501  CompareRoot(with, index);
502  j(equal, if_equal, if_equal_distance);
503  }
504 
505  // Compare the object in a register to a value and jump if they are not equal.
506  void JumpIfNotRoot(Register with, RootIndex index, Label* if_not_equal,
507  Label::Distance if_not_equal_distance = Label::kFar) {
508  CompareRoot(with, index);
509  j(not_equal, if_not_equal, if_not_equal_distance);
510  }
511 
512  // ---------------------------------------------------------------------------
513  // GC Support
514  // Notify the garbage collector that we wrote a pointer into an object.
515  // |object| is the object being stored into, |value| is the object being
516  // stored. value and scratch registers are clobbered by the operation.
517  // The offset is the offset from the start of the object, not the offset from
518  // the tagged HeapObject pointer. For use with FieldOperand(reg, off).
519  void RecordWriteField(
520  Register object, int offset, Register value, Register scratch,
521  SaveFPRegsMode save_fp,
522  RememberedSetAction remembered_set_action = EMIT_REMEMBERED_SET,
523  SmiCheck smi_check = INLINE_SMI_CHECK);
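 // Usage sketch (illustrative; masm, the registers, and kSomeFieldOffset are
 // assumptions): store a heap pointer into a field and record the write:
 //   masm->mov(FieldOperand(object, kSomeFieldOffset), value);
 //   masm->RecordWriteField(object, kSomeFieldOffset, value, scratch,
 //                          kDontSaveFPRegs);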
524 
525  // For page containing |object| mark region covering |address|
526  // dirty. |object| is the object being stored into, |value| is the
527  // object being stored. The address and value registers are clobbered by the
528  // operation. RecordWrite filters out smis so it does not update the
529  // write barrier if the value is a smi.
530  void RecordWrite(
531  Register object, Register address, Register value, SaveFPRegsMode save_fp,
532  RememberedSetAction remembered_set_action = EMIT_REMEMBERED_SET,
533  SmiCheck smi_check = INLINE_SMI_CHECK);
534 
535  // Frame restart support
536  void MaybeDropFrames();
537 
538  // Enter a specific kind of exit frame. Expects the number of
539  // arguments in register eax and sets up the number of arguments in
540  // register edi and the pointer to the first argument in register
541  // esi.
542  void EnterExitFrame(int argc, bool save_doubles, StackFrame::Type frame_type);
543 
544  void EnterApiExitFrame(int argc, Register scratch);
545 
546  // Leave the current exit frame. Expects the return value in
547  // register eax:edx (untouched) and the pointer to the first
548  // argument in register esi (if pop_arguments == true).
549  void LeaveExitFrame(bool save_doubles, bool pop_arguments = true);
550 
551  // Leave the current exit frame. Expects the return value in
552  // register eax (untouched).
553  void LeaveApiExitFrame();
554 
555  // Load the global proxy from the current context.
556  void LoadGlobalProxy(Register dst);
557 
558  // Load the global function with the given index.
559  void LoadGlobalFunction(int index, Register function);
560 
561  // Push and pop the registers that can hold pointers.
562  void PushSafepointRegisters() { pushad(); }
563  void PopSafepointRegisters() { popad(); }
564 
565  // ---------------------------------------------------------------------------
566  // JavaScript invokes
567 
568 
569  // Invoke the JavaScript function code by either calling or jumping.
570 
571  void InvokeFunctionCode(Register function, Register new_target,
572  const ParameterCount& expected,
573  const ParameterCount& actual, InvokeFlag flag);
574 
575  // On function call, call into the debugger if necessary.
576  // This may clobber ecx.
577  void CheckDebugHook(Register fun, Register new_target,
578  const ParameterCount& expected,
579  const ParameterCount& actual);
580 
581  // Invoke the JavaScript function in the given register. Changes the
582  // current context to the context in the function before invoking.
583  void InvokeFunction(Register function, Register new_target,
584  const ParameterCount& actual, InvokeFlag flag);
585 
586  // Compare object type for heap object.
587  // Incoming register is heap_object and outgoing register is map.
588  void CmpObjectType(Register heap_object, InstanceType type, Register map);
589 
590  // Compare instance type for map.
591  void CmpInstanceType(Register map, InstanceType type);
592 
593  void DoubleToI(Register result_reg, XMMRegister input_reg,
594  XMMRegister scratch, Label* lost_precision, Label* is_nan,
595  Label::Distance dst = Label::kFar);
596 
597  // Smi tagging support.
598  void SmiTag(Register reg) {
599  STATIC_ASSERT(kSmiTag == 0);
600  STATIC_ASSERT(kSmiTagSize == 1);
601  add(reg, reg);
602  }
603 
604  // Modifies the register even if it does not contain a Smi!
605  void UntagSmi(Register reg, Label* is_smi) {
606  STATIC_ASSERT(kSmiTagSize == 1);
607  sar(reg, kSmiTagSize);
608  STATIC_ASSERT(kSmiTag == 0);
609  j(not_carry, is_smi);
610  }
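 // Worked example of the encoding used above: with kSmiTag == 0 and
 // kSmiTagSize == 1, the value 5 is tagged as 5 << 1 == 10 (SmiTag's
 // add(reg, reg) is reg *= 2). UntagSmi's sar shifts the tag bit into the
 // carry flag, so not_carry is taken exactly when the original value was a smi.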
611 
612  // Jump if the register contains a non-smi.
613  inline void JumpIfNotSmi(Register value, Label* not_smi_label,
614  Label::Distance distance = Label::kFar) {
615  test(value, Immediate(kSmiTagMask));
616  j(not_zero, not_smi_label, distance);
617  }
618  // Jump if the operand is not a smi.
619  inline void JumpIfNotSmi(Operand value, Label* smi_label,
620  Label::Distance distance = Label::kFar) {
621  test(value, Immediate(kSmiTagMask));
622  j(not_zero, smi_label, distance);
623  }
624 
625  template<typename Field>
626  void DecodeField(Register reg) {
627  static const int shift = Field::kShift;
628  static const int mask = Field::kMask >> Field::kShift;
629  if (shift != 0) {
630  sar(reg, shift);
631  }
632  and_(reg, Immediate(mask));
633  }
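 // Worked example: for a Field with kShift == 3 and kMask == 0x78 (bits 3..6),
 // DecodeField emits sar(reg, 3) followed by and_(reg, Immediate(0xF)),
 // leaving only the decoded field value in reg.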
634 
635  // Abort execution if argument is not a smi, enabled via --debug-code.
636  void AssertSmi(Register object);
637 
638  // Abort execution if argument is a smi, enabled via --debug-code.
639  void AssertNotSmi(Register object);
640 
641  // Abort execution if argument is not a JSFunction, enabled via --debug-code.
642  void AssertFunction(Register object);
643 
644  // Abort execution if argument is not a Constructor, enabled via --debug-code.
645  void AssertConstructor(Register object);
646 
647  // Abort execution if argument is not a JSBoundFunction,
648  // enabled via --debug-code.
649  void AssertBoundFunction(Register object);
650 
651  // Abort execution if argument is not a JSGeneratorObject (or subclass),
652  // enabled via --debug-code.
653  void AssertGeneratorObject(Register object);
654 
655  // Abort execution if argument is not undefined or an AllocationSite, enabled
656  // via --debug-code.
657  void AssertUndefinedOrAllocationSite(Register object, Register scratch);
658 
659  // ---------------------------------------------------------------------------
660  // Exception handling
661 
662  // Push a new stack handler and link it into stack handler chain.
663  void PushStackHandler(Register scratch);
664 
665  // Unlink the stack handler on top of the stack from the stack handler chain.
666  void PopStackHandler(Register scratch);
667 
668  // ---------------------------------------------------------------------------
669  // Runtime calls
670 
671  // Call a code stub. Generate the code if necessary.
672  void CallStub(CodeStub* stub);
673 
674  // Tail call a code stub (jump). Generate the code if necessary.
675  void TailCallStub(CodeStub* stub);
676 
677  // Call a runtime routine.
678  void CallRuntime(const Runtime::Function* f, int num_arguments,
679  SaveFPRegsMode save_doubles = kDontSaveFPRegs);
680 
681  // Convenience function: Same as above, but takes the fid instead.
682  void CallRuntime(Runtime::FunctionId fid,
683  SaveFPRegsMode save_doubles = kDontSaveFPRegs) {
684  const Runtime::Function* function = Runtime::FunctionForId(fid);
685  CallRuntime(function, function->nargs, save_doubles);
686  }
687 
688  // Convenience function: Same as above, but takes the fid instead.
689  void CallRuntime(Runtime::FunctionId fid, int num_arguments,
690  SaveFPRegsMode save_doubles = kDontSaveFPRegs) {
691  CallRuntime(Runtime::FunctionForId(fid), num_arguments, save_doubles);
692  }
693 
694  // Convenience function: tail call a runtime routine (jump).
695  void TailCallRuntime(Runtime::FunctionId fid);
696 
697  // Jump to a runtime routine.
698  void JumpToExternalReference(const ExternalReference& ext,
699  bool builtin_exit_frame = false);
700 
701  // Generates a trampoline to jump to the off-heap instruction stream.
702  void JumpToInstructionStream(Address entry);
703 
704  // ---------------------------------------------------------------------------
705  // Utilities
706 
707  // Emit code to discard a non-negative number of pointer-sized elements
708  // from the stack, clobbering only the esp register.
709  void Drop(int element_count);
710 
711  void Pop(Register dst) { pop(dst); }
712  void Pop(Operand dst) { pop(dst); }
713  void PushReturnAddressFrom(Register src) { push(src); }
714  void PopReturnAddressTo(Register dst) { pop(dst); }
715 
716  // ---------------------------------------------------------------------------
717  // In-place weak references.
718  void LoadWeakValue(Register in_out, Label* target_if_cleared);
719 
720  // ---------------------------------------------------------------------------
721  // StatsCounter support
722 
723  void IncrementCounter(StatsCounter* counter, int value, Register scratch);
724  void DecrementCounter(StatsCounter* counter, int value, Register scratch);
725 
726  static int SafepointRegisterStackIndex(Register reg) {
727  return SafepointRegisterStackIndex(reg.code());
728  }
729 
730  private:
731  // Helper functions for generating invokes.
732  void InvokePrologue(const ParameterCount& expected,
733  const ParameterCount& actual, Label* done,
734  bool* definitely_mismatches, InvokeFlag flag,
735  Label::Distance done_distance);
736 
737  void EnterExitFramePrologue(StackFrame::Type frame_type, Register scratch);
738  void EnterExitFrameEpilogue(int argc, bool save_doubles);
739 
740  void LeaveExitFrameEpilogue();
741 
742  // Helper for implementing JumpIfNotInNewSpace and JumpIfInNewSpace.
743  void InNewSpace(Register object, Register scratch, Condition cc,
744  Label* condition_met,
745  Label::Distance condition_met_distance = Label::kFar);
746 
747  // Compute memory operands for safepoint stack slots.
748  static int SafepointRegisterStackIndex(int reg_code);
749 
750  // Needs access to SafepointRegisterStackIndex for compiled frame
751  // traversal.
752  friend class StandardFrame;
753 };
754 
755 // -----------------------------------------------------------------------------
756 // Static helper functions.
757 
758 // Generate an Operand for loading a field from an object.
759 inline Operand FieldOperand(Register object, int offset) {
760  return Operand(object, offset - kHeapObjectTag);
761 }
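// Example: heap object pointers carry a tag of kHeapObjectTag (1), so a field
// at byte offset 4 within an object is addressed as Operand(object, 4 - 1);
// FieldOperand(object, 4) and Operand(object, 3) are therefore equivalent.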
762 
763 // Generate an Operand for loading an indexed field from an object.
764 inline Operand FieldOperand(Register object, Register index, ScaleFactor scale,
765  int offset) {
766  return Operand(object, index, scale, offset - kHeapObjectTag);
767 }
768 
769 inline Operand FixedArrayElementOperand(Register array, Register index_as_smi,
770  int additional_offset = 0) {
771  int offset = FixedArray::kHeaderSize + additional_offset * kPointerSize;
772  return FieldOperand(array, index_as_smi, times_half_pointer_size, offset);
773 }
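// Example: a smi index equals the element index shifted left by one, and
// times_half_pointer_size scales it by 2, so (index << 1) * 2 == index * 4 ==
// index * kPointerSize bytes past the FixedArray header, as intended.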
774 
775 inline Operand ContextOperand(Register context, int index) {
776  return Operand(context, Context::SlotOffset(index));
777 }
778 
779 inline Operand ContextOperand(Register context, Register index) {
780  return Operand(context, index, times_pointer_size, Context::SlotOffset(0));
781 }
782 
783 inline Operand NativeContextOperand() {
784  return ContextOperand(esi, Context::NATIVE_CONTEXT_INDEX);
785 }
786 
787 #define ACCESS_MASM(masm) masm->
788 
789 } // namespace internal
790 } // namespace v8
791 
792 #endif // V8_IA32_MACRO_ASSEMBLER_IA32_H_