V8 API Reference, 7.2.502.16 (for Deno 0.2.4)
builtins-x64.cc
1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
4 
5 #if V8_TARGET_ARCH_X64
6 
7 #include "src/base/adapters.h"
8 #include "src/code-factory.h"
9 #include "src/counters.h"
10 #include "src/deoptimizer.h"
11 #include "src/frame-constants.h"
12 #include "src/frames.h"
13 #include "src/objects-inl.h"
14 #include "src/objects/debug-objects.h"
15 #include "src/objects/js-generator.h"
16 #include "src/objects/smi.h"
17 #include "src/register-configuration.h"
18 #include "src/wasm/wasm-linkage.h"
19 #include "src/wasm/wasm-objects.h"
20 
21 namespace v8 {
22 namespace internal {
23 
24 #define __ ACCESS_MASM(masm)
25 
26 void Builtins::Generate_Adaptor(MacroAssembler* masm, Address address,
27  ExitFrameType exit_frame_type) {
28  __ LoadAddress(kJavaScriptCallExtraArg1Register,
29  ExternalReference::Create(address));
30  if (exit_frame_type == BUILTIN_EXIT) {
31  __ Jump(BUILTIN_CODE(masm->isolate(), AdaptorWithBuiltinExitFrame),
32  RelocInfo::CODE_TARGET);
33  } else {
34  DCHECK(exit_frame_type == EXIT);
35  __ Jump(BUILTIN_CODE(masm->isolate(), AdaptorWithExitFrame),
36  RelocInfo::CODE_TARGET);
37  }
38 }
39 
40 static void GenerateTailCallToReturnedCode(MacroAssembler* masm,
41  Runtime::FunctionId function_id) {
42  // ----------- S t a t e -------------
43  // -- rax : argument count (preserved for callee)
44  // -- rdx : new target (preserved for callee)
45  // -- rdi : target function (preserved for callee)
46  // -----------------------------------
47  {
48  FrameScope scope(masm, StackFrame::INTERNAL);
49  // Push the number of arguments to the callee.
50  __ SmiTag(rax, rax);
51  __ Push(rax);
52  // Push a copy of the target function and the new target.
53  __ Push(rdi);
54  __ Push(rdx);
55  // Function is also the parameter to the runtime call.
56  __ Push(rdi);
57 
58  __ CallRuntime(function_id, 1);
59  __ movp(rcx, rax);
60 
61  // Restore target function and new target.
62  __ Pop(rdx);
63  __ Pop(rdi);
64  __ Pop(rax);
65  __ SmiUntag(rax, rax);
66  }
67  static_assert(kJavaScriptCallCodeStartRegister == rcx, "ABI mismatch");
68  __ leap(rcx, FieldOperand(rcx, Code::kHeaderSize));
69  __ jmp(rcx);
70 }
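
// Illustrative sketch (not part of the original builtins-x64.cc): the
// SmiTag/SmiUntag pair above protects the untagged argument count across the
// runtime call. A minimal C++ model of that encoding, assuming the classic
// x64 layout (32-bit Smi payload in the upper word half, no pointer
// compression); names are stand-ins, not the real V8 helpers.

#include <cstdint>

constexpr int kSmiShiftModel = 32;  // assumed kSmiShift on x64

uint64_t SmiTagModel(int32_t value) {
  return static_cast<uint64_t>(static_cast<uint32_t>(value)) << kSmiShiftModel;
}

int32_t SmiUntagModel(uint64_t smi) {
  // Logical shift then narrowing cast recovers negative values too.
  return static_cast<int32_t>(smi >> kSmiShiftModel);
}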
71 
72 namespace {
73 
74 void Generate_JSBuiltinsConstructStubHelper(MacroAssembler* masm) {
75  // ----------- S t a t e -------------
76  // -- rax: number of arguments
77  // -- rdi: constructor function
78  // -- rdx: new target
79  // -- rsi: context
80  // -----------------------------------
81 
82  // Enter a construct frame.
83  {
84  FrameScope scope(masm, StackFrame::CONSTRUCT);
85 
86  // Preserve the incoming parameters on the stack.
87  __ SmiTag(rcx, rax);
88  __ Push(rsi);
89  __ Push(rcx);
90 
91  // The receiver for the builtin/api call.
92  __ PushRoot(RootIndex::kTheHoleValue);
93 
94  // Set up pointer to last argument.
95  __ leap(rbx, Operand(rbp, StandardFrameConstants::kCallerSPOffset));
96 
97  // Copy arguments and receiver to the expression stack.
98  Label loop, entry;
99  __ movp(rcx, rax);
100  // ----------- S t a t e -------------
101  // -- rax: number of arguments (untagged)
102  // -- rdi: constructor function
103  // -- rdx: new target
104  // -- rbx: pointer to last argument
105  // -- rcx: counter
106  // -- sp[0*kPointerSize]: the hole (receiver)
107  // -- sp[1*kPointerSize]: number of arguments (tagged)
108  // -- sp[2*kPointerSize]: context
109  // -----------------------------------
110  __ jmp(&entry);
111  __ bind(&loop);
112  __ Push(Operand(rbx, rcx, times_pointer_size, 0));
113  __ bind(&entry);
114  __ decp(rcx);
115  __ j(greater_equal, &loop, Label::kNear);
116 
117  // Call the function.
118  // rax: number of arguments (untagged)
119  // rdi: constructor function
120  // rdx: new target
121  ParameterCount actual(rax);
122  __ InvokeFunction(rdi, rdx, actual, CALL_FUNCTION);
123 
124  // Restore context from the frame.
125  __ movp(rsi, Operand(rbp, ConstructFrameConstants::kContextOffset));
126  // Restore smi-tagged arguments count from the frame.
127  __ movp(rbx, Operand(rbp, ConstructFrameConstants::kLengthOffset));
128 
129  // Leave construct frame.
130  }
131 
132  // Remove caller arguments from the stack and return.
133  __ PopReturnAddressTo(rcx);
134  SmiIndex index = masm->SmiToIndex(rbx, rbx, kPointerSizeLog2);
135  __ leap(rsp, Operand(rsp, index.reg, index.scale, 1 * kPointerSize));
136  __ PushReturnAddressFrom(rcx);
137 
138  __ ret(0);
139 }
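
// Illustrative sketch (not part of the original builtins-x64.cc): the copy
// loop above in plain C++. last_arg stands for rbx (the slot of the last
// argument); pushing indices argc-1 down to 0 reproduces the caller's
// argument order in the new frame, below the already-pushed receiver.

#include <cstdint>
#include <vector>

void CopyArgumentsModel(std::vector<uint64_t>& frame, const uint64_t* last_arg,
                        int64_t argc) {
  for (int64_t i = argc - 1; i >= 0; --i) {  // decp rcx; j(greater_equal)
    frame.push_back(last_arg[i]);
  }
}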
140 
141 void Generate_StackOverflowCheck(
142  MacroAssembler* masm, Register num_args, Register scratch,
143  Label* stack_overflow,
144  Label::Distance stack_overflow_distance = Label::kFar) {
145  // Check the stack for overflow. We are not trying to catch
146  // interruptions (e.g. debug break and preemption) here, so the "real stack
147  // limit" is checked.
148  __ LoadRoot(kScratchRegister, RootIndex::kRealStackLimit);
149  __ movp(scratch, rsp);
150  // Make scratch the space we have left. The stack might already be overflowed
151  // here, which will cause scratch to become negative.
152  __ subp(scratch, kScratchRegister);
153  __ sarp(scratch, Immediate(kPointerSizeLog2));
154  // Check if the arguments will overflow the stack.
155  __ cmpp(scratch, num_args);
156  // Signed comparison.
157  __ j(less_equal, stack_overflow, stack_overflow_distance);
158 }
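
// Illustrative sketch (not part of the original builtins-x64.cc): the check
// above in plain C++. The signed comparison is the point: if rsp is already
// below the real stack limit, the slot count goes negative and still fails.

#include <cstdint>

bool WouldOverflowStackModel(uint64_t rsp, uint64_t real_stack_limit,
                             int64_t num_args) {
  int64_t space = static_cast<int64_t>(rsp - real_stack_limit);  // subp
  int64_t slots = space >> 3;  // sarp by kPointerSizeLog2 (assumed 3)
  return slots <= num_args;    // j(less_equal, stack_overflow)
}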
159 
160 } // namespace
161 
162 // The construct stub for ES5 constructor functions and ES6 class constructors.
163 void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) {
164  // ----------- S t a t e -------------
165  // -- rax: number of arguments (untagged)
166  // -- rdi: constructor function
167  // -- rdx: new target
168  // -- rsi: context
169  // -- sp[...]: constructor arguments
170  // -----------------------------------
171 
172  // Enter a construct frame.
173  {
174  FrameScope scope(masm, StackFrame::CONSTRUCT);
175  Label post_instantiation_deopt_entry, not_create_implicit_receiver;
176 
177  // Preserve the incoming parameters on the stack.
178  __ SmiTag(rcx, rax);
179  __ Push(rsi);
180  __ Push(rcx);
181  __ Push(rdi);
182  __ PushRoot(RootIndex::kTheHoleValue);
183  __ Push(rdx);
184 
185  // ----------- S t a t e -------------
186  // -- sp[0*kPointerSize]: new target
187  // -- sp[1*kPointerSize]: padding
188  // -- rdi and sp[2*kPointerSize]: constructor function
189  // -- sp[3*kPointerSize]: argument count
190  // -- sp[4*kPointerSize]: context
191  // -----------------------------------
192 
193  __ movp(rbx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
194  __ testl(FieldOperand(rbx, SharedFunctionInfo::kFlagsOffset),
195  Immediate(SharedFunctionInfo::IsDerivedConstructorBit::kMask));
196  __ j(not_zero, &not_create_implicit_receiver, Label::kNear);
197 
198  // If not derived class constructor: Allocate the new receiver object.
199  __ IncrementCounter(masm->isolate()->counters()->constructed_objects(), 1);
200  __ Call(BUILTIN_CODE(masm->isolate(), FastNewObject),
201  RelocInfo::CODE_TARGET);
202  __ jmp(&post_instantiation_deopt_entry, Label::kNear);
203 
204  // Else: use TheHoleValue as receiver for constructor call
205  __ bind(&not_create_implicit_receiver);
206  __ LoadRoot(rax, RootIndex::kTheHoleValue);
207 
208  // ----------- S t a t e -------------
209  // -- rax implicit receiver
210  // -- Slot 4 / sp[0*kPointerSize] new target
211  // -- Slot 3 / sp[1*kPointerSize] padding
212  // -- Slot 2 / sp[2*kPointerSize] constructor function
213  // -- Slot 1 / sp[3*kPointerSize] number of arguments (tagged)
214  // -- Slot 0 / sp[4*kPointerSize] context
215  // -----------------------------------
216  // Deoptimizer enters here.
217  masm->isolate()->heap()->SetConstructStubCreateDeoptPCOffset(
218  masm->pc_offset());
219  __ bind(&post_instantiation_deopt_entry);
220 
221  // Restore new target.
222  __ Pop(rdx);
223 
224  // Push the allocated receiver to the stack. We need two copies
225  // because we may have to return the original one and the calling
226  // conventions dictate that the called function pops the receiver.
227  __ Push(rax);
228  __ Push(rax);
229 
230  // ----------- S t a t e -------------
231  // -- sp[0*kPointerSize] implicit receiver
232  // -- sp[1*kPointerSize] implicit receiver
233  // -- sp[2*kPointerSize] padding
234  // -- sp[3*kPointerSize] constructor function
235  // -- sp[4*kPointerSize] number of arguments (tagged)
236  // -- sp[5*kPointerSize] context
237  // -----------------------------------
238 
239  // Restore constructor function and argument count.
240  __ movp(rdi, Operand(rbp, ConstructFrameConstants::kConstructorOffset));
241  __ SmiUntag(rax, Operand(rbp, ConstructFrameConstants::kLengthOffset));
242 
243  // Set up pointer to last argument.
244  __ leap(rbx, Operand(rbp, StandardFrameConstants::kCallerSPOffset));
245 
246  // Check if we have enough stack space to push all arguments.
247  // Argument count in rax. Clobbers rcx.
248  Label enough_stack_space, stack_overflow;
249  Generate_StackOverflowCheck(masm, rax, rcx, &stack_overflow, Label::kNear);
250  __ jmp(&enough_stack_space, Label::kNear);
251 
252  __ bind(&stack_overflow);
253  // Restore context from the frame.
254  __ movp(rsi, Operand(rbp, ConstructFrameConstants::kContextOffset));
255  __ CallRuntime(Runtime::kThrowStackOverflow);
256  // This should be unreachable.
257  __ int3();
258 
259  __ bind(&enough_stack_space);
260 
261  // Copy arguments and receiver to the expression stack.
262  Label loop, entry;
263  __ movp(rcx, rax);
264  // ----------- S t a t e -------------
265  // -- rax: number of arguments (untagged)
266  // -- rdx: new target
267  // -- rbx: pointer to last argument
268  // -- rcx: counter (untagged)
269  // -- sp[0*kPointerSize]: implicit receiver
270  // -- sp[1*kPointerSize]: implicit receiver
271  // -- sp[2*kPointerSize]: padding
272  // -- rdi and sp[3*kPointerSize]: constructor function
273  // -- sp[4*kPointerSize]: number of arguments (tagged)
274  // -- sp[5*kPointerSize]: context
275  // -----------------------------------
276  __ jmp(&entry, Label::kNear);
277  __ bind(&loop);
278  __ Push(Operand(rbx, rcx, times_pointer_size, 0));
279  __ bind(&entry);
280  __ decp(rcx);
281  __ j(greater_equal, &loop, Label::kNear);
282 
283  // Call the function.
284  ParameterCount actual(rax);
285  __ InvokeFunction(rdi, rdx, actual, CALL_FUNCTION);
286 
287  // ----------- S t a t e -------------
288  // -- rax constructor result
289  // -- sp[0*kPointerSize] implicit receiver
290  // -- sp[1*kPointerSize] padding
291  // -- sp[2*kPointerSize] constructor function
292  // -- sp[3*kPointerSize] number of arguments
293  // -- sp[4*kPointerSize] context
294  // -----------------------------------
295 
296  // Store offset of return address for deoptimizer.
297  masm->isolate()->heap()->SetConstructStubInvokeDeoptPCOffset(
298  masm->pc_offset());
299 
300  // Restore context from the frame.
301  __ movp(rsi, Operand(rbp, ConstructFrameConstants::kContextOffset));
302 
303  // If the result is an object (in the ECMA sense), we should get rid
304  // of the receiver and use the result; see ECMA-262 section 13.2.2-7
305  // on page 74.
306  Label use_receiver, do_throw, leave_frame;
307 
308  // If the result is undefined, we jump out to using the implicit receiver.
309  __ JumpIfRoot(rax, RootIndex::kUndefinedValue, &use_receiver, Label::kNear);
310 
311  // Otherwise we do a smi check and fall through to check if the return value
312  // is a valid receiver.
313 
314  // If the result is a smi, it is *not* an object in the ECMA sense.
315  __ JumpIfSmi(rax, &use_receiver, Label::kNear);
316 
317  // If the type of the result (stored in its map) is less than
318  // FIRST_JS_RECEIVER_TYPE, it is not an object in the ECMA sense.
319  STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
320  __ CmpObjectType(rax, FIRST_JS_RECEIVER_TYPE, rcx);
321  __ j(above_equal, &leave_frame, Label::kNear);
322  __ jmp(&use_receiver, Label::kNear);
323 
324  __ bind(&do_throw);
325  __ CallRuntime(Runtime::kThrowConstructorReturnedNonObject);
326 
327  // Throw away the result of the constructor invocation and use the
328  // on-stack receiver as the result.
329  __ bind(&use_receiver);
330  __ movp(rax, Operand(rsp, 0 * kPointerSize));
331  __ JumpIfRoot(rax, RootIndex::kTheHoleValue, &do_throw, Label::kNear);
332 
333  __ bind(&leave_frame);
334  // Restore the arguments count.
335  __ movp(rbx, Operand(rbp, ConstructFrameConstants::kLengthOffset));
336  // Leave construct frame.
337  }
338  // Remove caller arguments from the stack and return.
339  __ PopReturnAddressTo(rcx);
340  SmiIndex index = masm->SmiToIndex(rbx, rbx, kPointerSizeLog2);
341  __ leap(rsp, Operand(rsp, index.reg, index.scale, 1 * kPointerSize));
342  __ PushReturnAddressFrom(rcx);
343  __ ret(0);
344 }
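
// Illustrative sketch (not part of the original builtins-x64.cc): the result
// selection after InvokeFunction above, mirroring the stub's control flow
// rather than the full spec text. Kind/ObjectModel are stand-ins for the
// real V8 types and predicates.

enum class Kind { kSmi, kUndefined, kJSReceiver, kTheHole, kOther };
struct ObjectModel { Kind kind; };

ObjectModel SelectConstructResult(ObjectModel result,
                                  ObjectModel stack_receiver) {
  // A JS object returned from the constructor wins over the receiver.
  if (result.kind == Kind::kJSReceiver) return result;
  // Anything else (undefined, a Smi, other primitives) falls back to the
  // receiver saved on the stack; a hole there means a derived constructor
  // returned a non-object, which throws.
  if (stack_receiver.kind == Kind::kTheHole) {
    throw "TypeError";  // Runtime::kThrowConstructorReturnedNonObject
  }
  return stack_receiver;
}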
345 
346 void Builtins::Generate_JSBuiltinsConstructStub(MacroAssembler* masm) {
347  Generate_JSBuiltinsConstructStubHelper(masm);
348 }
349 
350 void Builtins::Generate_ConstructedNonConstructable(MacroAssembler* masm) {
351  FrameScope scope(masm, StackFrame::INTERNAL);
352  __ Push(rdi);
353  __ CallRuntime(Runtime::kThrowConstructedNonConstructable);
354 }
355 
356 static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm,
357  bool is_construct) {
358  // Expects five C++ function parameters.
359  // - Object* new_target
360  // - JSFunction* function
361  // - Object* receiver
362  // - int argc
363  // - Object*** argv
364  // (see Handle::Invoke in execution.cc).
365 
366  // Open a C++ scope for the FrameScope.
367  {
368 // Platform specific argument handling. After this, the stack contains
369 // an internal frame and the pushed function and receiver, and
370 // registers rax and rbx hold the argument count and argument array,
371 // while rdi holds the function pointer, rsi the context, and rdx the
372 // new.target.
373 
374 #ifdef _WIN64
375  // MSVC parameters in:
376  // rcx : new_target
377  // rdx : function
378  // r8 : receiver
379  // r9 : argc
380  // [rsp+0x20] : argv
381 
382  // Enter an internal frame.
383  FrameScope scope(masm, StackFrame::INTERNAL);
384 
385  // Setup the context (we need to use the caller context from the isolate).
386  ExternalReference context_address = ExternalReference::Create(
387  IsolateAddressId::kContextAddress, masm->isolate());
388  __ movp(rsi, masm->ExternalReferenceAsOperand(context_address));
389 
390  // Push the function and the receiver onto the stack.
391  __ Push(rdx);
392  __ Push(r8);
393 
394  // Load the number of arguments and setup pointer to the arguments.
395  __ movp(rax, r9);
396  // Load the previous frame pointer to access C arguments on the stack
397  __ movp(kScratchRegister, Operand(rbp, 0));
398  __ movp(rbx, Operand(kScratchRegister, EntryFrameConstants::kArgvOffset));
399  // Load the function pointer into rdi.
400  __ movp(rdi, rdx);
401  // Load the new.target into rdx.
402  __ movp(rdx, rcx);
403 #else // _WIN64
404  // GCC parameters in:
405  // rdi : new_target
406  // rsi : function
407  // rdx : receiver
408  // rcx : argc
409  // r8 : argv
410 
411  __ movp(r11, rdi);
412  __ movp(rdi, rsi);
413  // rdi : function
414  // r11 : new_target
415 
416  // Clear the context before we push it when entering the internal frame.
417  __ Set(rsi, 0);
418 
419  // Enter an internal frame.
420  FrameScope scope(masm, StackFrame::INTERNAL);
421 
422  // Setup the context (we need to use the caller context from the isolate).
423  ExternalReference context_address = ExternalReference::Create(
424  IsolateAddressId::kContextAddress, masm->isolate());
425  __ movp(rsi, masm->ExternalReferenceAsOperand(context_address));
426 
427  // Push the function and receiver onto the stack.
428  __ Push(rdi);
429  __ Push(rdx);
430 
431  // Load the number of arguments and setup pointer to the arguments.
432  __ movp(rax, rcx);
433  __ movp(rbx, r8);
434 
435  // Load the new.target into rdx.
436  __ movp(rdx, r11);
437 #endif // _WIN64
438 
439  // Current stack contents:
440  // [rsp + 2 * kPointerSize ... ] : Internal frame
441  // [rsp + kPointerSize] : function
442  // [rsp] : receiver
443  // Current register contents:
444  // rax : argc
445  // rbx : argv
446  // rsi : context
447  // rdi : function
448  // rdx : new.target
449 
450  // Check if we have enough stack space to push all arguments.
451  // Argument count in rax. Clobbers rcx.
452  Label enough_stack_space, stack_overflow;
453  Generate_StackOverflowCheck(masm, rax, rcx, &stack_overflow, Label::kNear);
454  __ jmp(&enough_stack_space, Label::kNear);
455 
456  __ bind(&stack_overflow);
457  __ CallRuntime(Runtime::kThrowStackOverflow);
458  // This should be unreachable.
459  __ int3();
460 
461  __ bind(&enough_stack_space);
462 
463  // Copy arguments to the stack in a loop.
464  // Register rbx points to an array of pointers to handle locations.
465  // Push the values of these handles.
466  Label loop, entry;
467  __ Set(rcx, 0); // Set loop variable to 0.
468  __ jmp(&entry, Label::kNear);
469  __ bind(&loop);
470  __ movp(kScratchRegister, Operand(rbx, rcx, times_pointer_size, 0));
471  __ Push(Operand(kScratchRegister, 0)); // dereference handle
472  __ addp(rcx, Immediate(1));
473  __ bind(&entry);
474  __ cmpp(rcx, rax);
475  __ j(not_equal, &loop, Label::kNear);
476 
477  // Invoke the builtin code.
478  Handle<Code> builtin = is_construct
479  ? BUILTIN_CODE(masm->isolate(), Construct)
480  : masm->isolate()->builtins()->Call();
481  __ Call(builtin, RelocInfo::CODE_TARGET);
482 
483  // Exit the internal frame. Notice that this also removes the empty
484  // context and the function left on the stack by the code
485  // invocation.
486  }
487 
488  __ ret(0);
489 }
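
// Illustrative sketch (not part of the original builtins-x64.cc): the two
// #ifdef arms exist because the first integer parameters arrive in different
// registers per ABI (Win64: rcx, rdx, r8, r9, the rest on the stack past the
// 32-byte shadow space, hence argv at [rsp+0x20]; System V: rdi, rsi, rdx,
// rcx, r8). The argv loop then dereferences each handle location once:

#include <cstdint>
#include <vector>

void PushArgvModel(std::vector<uint64_t>& frame, uint64_t* const* argv,
                   int argc) {
  for (int i = 0; i < argc; ++i) {
    frame.push_back(*argv[i]);  // movp scratch, [rbx+rcx*8]; Push [scratch]
  }
}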
490 
491 void Builtins::Generate_JSEntryTrampoline(MacroAssembler* masm) {
492  Generate_JSEntryTrampolineHelper(masm, false);
493 }
494 
495 void Builtins::Generate_JSConstructEntryTrampoline(MacroAssembler* masm) {
496  Generate_JSEntryTrampolineHelper(masm, true);
497 }
498 
499 static void GetSharedFunctionInfoBytecode(MacroAssembler* masm,
500  Register sfi_data,
501  Register scratch1) {
502  Label done;
503 
504  __ CmpObjectType(sfi_data, INTERPRETER_DATA_TYPE, scratch1);
505  __ j(not_equal, &done, Label::kNear);
506  __ movp(sfi_data,
507  FieldOperand(sfi_data, InterpreterData::kBytecodeArrayOffset));
508 
509  __ bind(&done);
510 }
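
// Illustrative sketch (not part of the original builtins-x64.cc): what the
// type check above does, with stand-in structs instead of the real classes.
// The SFI's function_data is either a BytecodeArray already or an
// InterpreterData wrapper holding one.

struct BytecodeArrayModel {};
struct InterpreterDataModel { BytecodeArrayModel* bytecode_array; };

struct FunctionDataModel {
  bool is_interpreter_data;  // CmpObjectType(..., INTERPRETER_DATA_TYPE, ...)
  BytecodeArrayModel* bytecode;            // valid if !is_interpreter_data
  InterpreterDataModel* interpreter_data;  // valid if is_interpreter_data
};

BytecodeArrayModel* GetBytecodeModel(const FunctionDataModel& data) {
  return data.is_interpreter_data ? data.interpreter_data->bytecode_array
                                  : data.bytecode;
}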
511 
512 // static
513 void Builtins::Generate_ResumeGeneratorTrampoline(MacroAssembler* masm) {
514  // ----------- S t a t e -------------
515  // -- rax : the value to pass to the generator
516  // -- rdx : the JSGeneratorObject to resume
517  // -- rsp[0] : return address
518  // -----------------------------------
519  __ AssertGeneratorObject(rdx);
520 
521  // Store input value into generator object.
522  __ movp(FieldOperand(rdx, JSGeneratorObject::kInputOrDebugPosOffset), rax);
523  __ RecordWriteField(rdx, JSGeneratorObject::kInputOrDebugPosOffset, rax, rcx,
524  kDontSaveFPRegs);
525 
526  // Load suspended function and context.
527  __ movp(rdi, FieldOperand(rdx, JSGeneratorObject::kFunctionOffset));
528  __ movp(rsi, FieldOperand(rdi, JSFunction::kContextOffset));
529 
530  // Flood function if we are stepping.
531  Label prepare_step_in_if_stepping, prepare_step_in_suspended_generator;
532  Label stepping_prepared;
533  ExternalReference debug_hook =
534  ExternalReference::debug_hook_on_function_call_address(masm->isolate());
535  Operand debug_hook_operand = masm->ExternalReferenceAsOperand(debug_hook);
536  __ cmpb(debug_hook_operand, Immediate(0));
537  __ j(not_equal, &prepare_step_in_if_stepping);
538 
539  // Flood function if we need to continue stepping in the suspended generator.
540  ExternalReference debug_suspended_generator =
541  ExternalReference::debug_suspended_generator_address(masm->isolate());
542  Operand debug_suspended_generator_operand =
543  masm->ExternalReferenceAsOperand(debug_suspended_generator);
544  __ cmpp(rdx, debug_suspended_generator_operand);
545  __ j(equal, &prepare_step_in_suspended_generator);
546  __ bind(&stepping_prepared);
547 
548  // Check the stack for overflow. We are not trying to catch interruptions
549  // (e.g. debug break and preemption) here, so check the "real stack limit".
550  Label stack_overflow;
551  __ CompareRoot(rsp, RootIndex::kRealStackLimit);
552  __ j(below, &stack_overflow);
553 
554  // Pop return address.
555  __ PopReturnAddressTo(rax);
556 
557  // Push receiver.
558  __ Push(FieldOperand(rdx, JSGeneratorObject::kReceiverOffset));
559 
560  // ----------- S t a t e -------------
561  // -- rax : return address
562  // -- rdx : the JSGeneratorObject to resume
563  // -- rdi : generator function
564  // -- rsi : generator context
565  // -- rsp[0] : generator receiver
566  // -----------------------------------
567 
568  // Copy the function arguments from the generator object's register file.
569  __ movp(rcx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
570  __ movzxwq(
571  rcx, FieldOperand(rcx, SharedFunctionInfo::kFormalParameterCountOffset));
572 
573  __ movp(rbx,
574  FieldOperand(rdx, JSGeneratorObject::kParametersAndRegistersOffset));
575 
576  {
577  Label done_loop, loop;
578  __ Set(r9, 0);
579 
580  __ bind(&loop);
581  __ cmpl(r9, rcx);
582  __ j(greater_equal, &done_loop, Label::kNear);
583  __ Push(FieldOperand(rbx, r9, times_pointer_size, FixedArray::kHeaderSize));
584  __ addl(r9, Immediate(1));
585  __ jmp(&loop);
586 
587  __ bind(&done_loop);
588  }
589 
590  // Underlying function needs to have bytecode available.
591  if (FLAG_debug_code) {
592  __ movp(rcx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
593  __ movp(rcx, FieldOperand(rcx, SharedFunctionInfo::kFunctionDataOffset));
594  GetSharedFunctionInfoBytecode(masm, rcx, kScratchRegister);
595  __ CmpObjectType(rcx, BYTECODE_ARRAY_TYPE, rcx);
596  __ Assert(equal, AbortReason::kMissingBytecodeArray);
597  }
598 
599  // Resume (Ignition/TurboFan) generator object.
600  {
601  __ PushReturnAddressFrom(rax);
602  __ movp(rax, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
603  __ movzxwq(rax, FieldOperand(
604  rax, SharedFunctionInfo::kFormalParameterCountOffset));
605  // We abuse new.target both to indicate that this is a resume call and to
606  // pass in the generator object. In ordinary calls, new.target is always
607  // undefined because generator functions are non-constructable.
608  static_assert(kJavaScriptCallCodeStartRegister == rcx, "ABI mismatch");
609  __ movp(rcx, FieldOperand(rdi, JSFunction::kCodeOffset));
610  __ addp(rcx, Immediate(Code::kHeaderSize - kHeapObjectTag));
611  __ jmp(rcx);
612  }
613 
614  __ bind(&prepare_step_in_if_stepping);
615  {
616  FrameScope scope(masm, StackFrame::INTERNAL);
617  __ Push(rdx);
618  __ Push(rdi);
619  // Push hole as receiver since we do not use it for stepping.
620  __ PushRoot(RootIndex::kTheHoleValue);
621  __ CallRuntime(Runtime::kDebugOnFunctionCall);
622  __ Pop(rdx);
623  __ movp(rdi, FieldOperand(rdx, JSGeneratorObject::kFunctionOffset));
624  }
625  __ jmp(&stepping_prepared);
626 
627  __ bind(&prepare_step_in_suspended_generator);
628  {
629  FrameScope scope(masm, StackFrame::INTERNAL);
630  __ Push(rdx);
631  __ CallRuntime(Runtime::kDebugPrepareStepInSuspendedGenerator);
632  __ Pop(rdx);
633  __ movp(rdi, FieldOperand(rdx, JSGeneratorObject::kFunctionOffset));
634  }
635  __ jmp(&stepping_prepared);
636 
637  __ bind(&stack_overflow);
638  {
639  FrameScope scope(masm, StackFrame::INTERNAL);
640  __ CallRuntime(Runtime::kThrowStackOverflow);
641  __ int3(); // This should be unreachable.
642  }
643 }
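
// Illustrative sketch (not part of the original builtins-x64.cc): the resume
// path above re-materializes the call's arguments from the generator's
// register file, one FixedArray slot per formal parameter.

#include <cstdint>
#include <vector>

void PushGeneratorArgumentsModel(std::vector<uint64_t>& frame,
                                 const uint64_t* parameters_and_registers,
                                 int formal_parameter_count) {
  for (int i = 0; i < formal_parameter_count; ++i) {  // r9 counts up from 0
    frame.push_back(parameters_and_registers[i]);
  }
}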
644 
645 // TODO(juliana): if we remove the code below then we don't need all
646 // the parameters.
647 static void ReplaceClosureCodeWithOptimizedCode(
648  MacroAssembler* masm, Register optimized_code, Register closure,
649  Register scratch1, Register scratch2, Register scratch3) {
650 
651  // Store the optimized code in the closure.
652  __ movp(FieldOperand(closure, JSFunction::kCodeOffset), optimized_code);
653  __ movp(scratch1, optimized_code); // Write barrier clobbers scratch1 below.
654  __ RecordWriteField(closure, JSFunction::kCodeOffset, scratch1, scratch2,
655  kDontSaveFPRegs, OMIT_REMEMBERED_SET, OMIT_SMI_CHECK);
656 }
657 
658 static void LeaveInterpreterFrame(MacroAssembler* masm, Register scratch1,
659  Register scratch2) {
660  Register args_count = scratch1;
661  Register return_pc = scratch2;
662 
663  // Get the arguments + receiver count.
664  __ movp(args_count,
665  Operand(rbp, InterpreterFrameConstants::kBytecodeArrayFromFp));
666  __ movl(args_count,
667  FieldOperand(args_count, BytecodeArray::kParameterSizeOffset));
668 
669  // Leave the frame (also dropping the register file).
670  __ leave();
671 
672  // Drop receiver + arguments.
673  __ PopReturnAddressTo(return_pc);
674  __ addp(rsp, args_count);
675  __ PushReturnAddressFrom(return_pc);
676 }
677 
678 // Tail-call |function_id| if |smi_entry| == |marker|
679 static void TailCallRuntimeIfMarkerEquals(MacroAssembler* masm,
680  Register smi_entry,
681  OptimizationMarker marker,
682  Runtime::FunctionId function_id) {
683  Label no_match;
684  __ SmiCompare(smi_entry, Smi::FromEnum(marker));
685  __ j(not_equal, &no_match);
686  GenerateTailCallToReturnedCode(masm, function_id);
687  __ bind(&no_match);
688 }
689 
690 static void MaybeTailCallOptimizedCodeSlot(MacroAssembler* masm,
691  Register feedback_vector,
692  Register scratch1, Register scratch2,
693  Register scratch3) {
694  // ----------- S t a t e -------------
695  // -- rax : argument count (preserved for callee if needed, and caller)
696  // -- rdx : new target (preserved for callee if needed, and caller)
697  // -- rdi : target function (preserved for callee if needed, and caller)
698  // -- feedback vector (preserved for caller if needed)
699  // -----------------------------------
700  DCHECK(!AreAliased(feedback_vector, rax, rdx, rdi, scratch1, scratch2,
701  scratch3));
702 
703  Label optimized_code_slot_is_weak_ref, fallthrough;
704 
705  Register closure = rdi;
706  Register optimized_code_entry = scratch1;
707 
708  __ movp(optimized_code_entry,
709  FieldOperand(feedback_vector, FeedbackVector::kOptimizedCodeOffset));
710 
711  // Check if the code entry is a Smi. If yes, we interpret it as an
712  // optimization marker. Otherwise, interpret it as a weak reference to a code
713  // object.
714  __ JumpIfNotSmi(optimized_code_entry, &optimized_code_slot_is_weak_ref);
715 
716  {
717  // Optimized code slot is a Smi optimization marker.
718 
719  // Fall through if no optimization trigger.
720  __ SmiCompare(optimized_code_entry,
721  Smi::FromEnum(OptimizationMarker::kNone));
722  __ j(equal, &fallthrough);
723 
724  // TODO(v8:8394): The logging of first execution will break if
725  // feedback vectors are not allocated. We need to find a different way of
726  // logging these events if required.
727  TailCallRuntimeIfMarkerEquals(masm, optimized_code_entry,
728  OptimizationMarker::kLogFirstExecution,
729  Runtime::kFunctionFirstExecution);
730  TailCallRuntimeIfMarkerEquals(masm, optimized_code_entry,
731  OptimizationMarker::kCompileOptimized,
732  Runtime::kCompileOptimized_NotConcurrent);
733  TailCallRuntimeIfMarkerEquals(
734  masm, optimized_code_entry,
735  OptimizationMarker::kCompileOptimizedConcurrent,
736  Runtime::kCompileOptimized_Concurrent);
737 
738  {
739  // Otherwise, the marker is InOptimizationQueue, so fall through hoping
740  // that an interrupt will eventually update the slot with optimized code.
741  if (FLAG_debug_code) {
742  __ SmiCompare(optimized_code_entry,
743  Smi::FromEnum(OptimizationMarker::kInOptimizationQueue));
744  __ Assert(equal, AbortReason::kExpectedOptimizationSentinel);
745  }
746  __ jmp(&fallthrough);
747  }
748  }
749 
750  {
751  // Optimized code slot is a weak reference.
752  __ bind(&optimized_code_slot_is_weak_ref);
753 
754  __ LoadWeakValue(optimized_code_entry, &fallthrough);
755 
756  // Check if the optimized code is marked for deopt. If it is, call the
757  // runtime to clear it.
758  Label found_deoptimized_code;
759  __ movp(scratch2,
760  FieldOperand(optimized_code_entry, Code::kCodeDataContainerOffset));
761  __ testl(
762  FieldOperand(scratch2, CodeDataContainer::kKindSpecificFlagsOffset),
763  Immediate(1 << Code::kMarkedForDeoptimizationBit));
764  __ j(not_zero, &found_deoptimized_code);
765 
766  // Optimized code is good, get it into the closure and link the closure into
767  // the optimized functions list, then tail call the optimized code.
768  // The feedback vector is no longer used, so re-use it as a scratch
769  // register.
770  ReplaceClosureCodeWithOptimizedCode(masm, optimized_code_entry, closure,
771  scratch2, scratch3, feedback_vector);
772  static_assert(kJavaScriptCallCodeStartRegister == rcx, "ABI mismatch");
773  __ Move(rcx, optimized_code_entry);
774  __ addp(rcx, Immediate(Code::kHeaderSize - kHeapObjectTag));
775  __ jmp(rcx);
776 
777  // Optimized code slot contains deoptimized code, evict it and re-enter the
778  // closure's code.
779  __ bind(&found_deoptimized_code);
780  GenerateTailCallToReturnedCode(masm, Runtime::kEvictOptimizedCodeSlot);
781  }
782 
783  // Fall-through if the optimized code cell is clear and there is no
784  // optimization marker.
785  __ bind(&fallthrough);
786 }
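
// Illustrative sketch (not part of the original builtins-x64.cc): the
// decision tree implemented above, as a plain function. Enum and names are
// stand-ins for the real V8 definitions.

enum class MarkerModel { kNone, kLogFirstExecution, kCompileOptimized,
                         kCompileOptimizedConcurrent, kInOptimizationQueue };
enum class ActionModel { kFallthrough, kTailCallRuntime,
                         kTailCallOptimizedCode, kEvictAndReenter };

ActionModel DispatchOptimizedCodeSlot(bool slot_is_smi, MarkerModel marker,
                                      bool weak_ref_cleared,
                                      bool marked_for_deopt) {
  if (slot_is_smi) {  // a Smi slot is an optimization marker
    if (marker == MarkerModel::kNone ||
        marker == MarkerModel::kInOptimizationQueue) {
      return ActionModel::kFallthrough;  // keep running the current code
    }
    return ActionModel::kTailCallRuntime;  // log / compile, per marker
  }
  // Otherwise the slot is a weak reference to optimized code.
  if (weak_ref_cleared) return ActionModel::kFallthrough;
  if (marked_for_deopt) return ActionModel::kEvictAndReenter;
  return ActionModel::kTailCallOptimizedCode;
}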
787 
788 // Advance the current bytecode offset. This simulates what all bytecode
789 // handlers do upon completion of the underlying operation. Will bail out to a
790 // label if the bytecode (without prefix) is a return bytecode.
791 static void AdvanceBytecodeOffsetOrReturn(MacroAssembler* masm,
792  Register bytecode_array,
793  Register bytecode_offset,
794  Register bytecode, Register scratch1,
795  Label* if_return) {
796  Register bytecode_size_table = scratch1;
797  DCHECK(!AreAliased(bytecode_array, bytecode_offset, bytecode_size_table,
798  bytecode));
799 
800  __ Move(bytecode_size_table,
801  ExternalReference::bytecode_size_table_address());
802 
803  // Check if the bytecode is a Wide or ExtraWide prefix bytecode.
804  Label process_bytecode, extra_wide;
805  STATIC_ASSERT(0 == static_cast<int>(interpreter::Bytecode::kWide));
806  STATIC_ASSERT(1 == static_cast<int>(interpreter::Bytecode::kExtraWide));
807  STATIC_ASSERT(2 == static_cast<int>(interpreter::Bytecode::kDebugBreakWide));
808  STATIC_ASSERT(3 ==
809  static_cast<int>(interpreter::Bytecode::kDebugBreakExtraWide));
810  __ cmpb(bytecode, Immediate(0x3));
811  __ j(above, &process_bytecode, Label::kNear);
812  __ testb(bytecode, Immediate(0x1));
813  __ j(not_equal, &extra_wide, Label::kNear);
814 
815  // Load the next bytecode and update table to the wide scaled table.
816  __ incl(bytecode_offset);
817  __ movzxbp(bytecode, Operand(bytecode_array, bytecode_offset, times_1, 0));
818  __ addp(bytecode_size_table,
819  Immediate(kIntSize * interpreter::Bytecodes::kBytecodeCount));
820  __ jmp(&process_bytecode, Label::kNear);
821 
822  __ bind(&extra_wide);
823  // Load the next bytecode and update table to the extra wide scaled table.
824  __ incl(bytecode_offset);
825  __ movzxbp(bytecode, Operand(bytecode_array, bytecode_offset, times_1, 0));
826  __ addp(bytecode_size_table,
827  Immediate(2 * kIntSize * interpreter::Bytecodes::kBytecodeCount));
828 
829  __ bind(&process_bytecode);
830 
831 // Bailout to the return label if this is a return bytecode.
832 #define JUMP_IF_EQUAL(NAME) \
833  __ cmpb(bytecode, \
834  Immediate(static_cast<int>(interpreter::Bytecode::k##NAME))); \
835  __ j(equal, if_return, Label::kFar);
836  RETURN_BYTECODE_LIST(JUMP_IF_EQUAL)
837 #undef JUMP_IF_EQUAL
838 
839  // Otherwise, load the size of the current bytecode and advance the offset.
840  __ addl(bytecode_offset, Operand(bytecode_size_table, bytecode, times_4, 0));
841 }
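
// Illustrative sketch (not part of the original builtins-x64.cc): the offset
// advance above in plain C++, minus the return-bytecode bailout. Opcodes
// 0..3 are the Wide/ExtraWide prefixes (and their DebugBreak variants); odd
// values select the extra-wide size table, even values the wide one.

#include <cstdint>

int AdvanceOffsetModel(const uint8_t* bytecodes, int offset,
                       const int32_t* size_table, int bytecode_count) {
  int scale = 0;  // 0 = single, 1 = wide, 2 = extra-wide table section
  uint8_t b = bytecodes[offset];
  if (b <= 0x3) {             // cmpb(bytecode, 0x3); j(above, ...)
    scale = (b & 1) ? 2 : 1;  // testb(bytecode, 0x1)
    b = bytecodes[++offset];  // consume the prefix, load the real bytecode
  }
  return offset + size_table[scale * bytecode_count + b];
}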
842 
843 // Generate code for entering a JS function with the interpreter.
844 // On entry to the function the receiver and arguments have been pushed on the
845 // stack left to right. The actual argument count matches the formal parameter
846 // count expected by the function.
847 //
848 // The live registers are:
849 // o rdi: the JS function object being called
850 // o rdx: the incoming new target or generator object
851 // o rsi: our context
852 // o rbp: the caller's frame pointer
853 // o rsp: stack pointer (pointing to return address)
854 //
855 // The function builds an interpreter frame. See InterpreterFrameConstants in
856 // frames.h for its layout.
857 void Builtins::Generate_InterpreterEntryTrampoline(MacroAssembler* masm) {
858  Register closure = rdi;
859  Register feedback_vector = rbx;
860 
861  // Load the feedback vector from the closure.
862  __ movp(feedback_vector,
863  FieldOperand(closure, JSFunction::kFeedbackCellOffset));
864  __ movp(feedback_vector, FieldOperand(feedback_vector, Cell::kValueOffset));
865 
866  Label push_stack_frame;
867  // Check if feedback vector is valid. If valid, check for optimized code
868  // and update invocation count. Otherwise, setup the stack frame.
869  __ JumpIfRoot(feedback_vector, RootIndex::kUndefinedValue, &push_stack_frame);
870 
871  // Read off the optimized code slot in the feedback vector, and if there
872  // is optimized code or an optimization marker, call that instead.
873  MaybeTailCallOptimizedCodeSlot(masm, feedback_vector, rcx, r14, r15);
874 
875  // Increment invocation count for the function.
876  __ incl(
877  FieldOperand(feedback_vector, FeedbackVector::kInvocationCountOffset));
878 
879  // Open a frame scope to indicate that there is a frame on the stack. The
880  // MANUAL indicates that the scope shouldn't actually generate code to set up
881  // the frame (that is done below).
882  __ bind(&push_stack_frame);
883  FrameScope frame_scope(masm, StackFrame::MANUAL);
884  __ pushq(rbp); // Caller's frame pointer.
885  __ movp(rbp, rsp);
886  __ Push(rsi); // Callee's context.
887  __ Push(rdi); // Callee's JS function.
888 
889  // Get the bytecode array from the function object and load it into
890  // kInterpreterBytecodeArrayRegister.
891  __ movp(rax, FieldOperand(closure, JSFunction::kSharedFunctionInfoOffset));
892  __ movp(kInterpreterBytecodeArrayRegister,
893  FieldOperand(rax, SharedFunctionInfo::kFunctionDataOffset));
894  GetSharedFunctionInfoBytecode(masm, kInterpreterBytecodeArrayRegister,
895  kScratchRegister);
896 
897  // Check function data field is actually a BytecodeArray object.
898  if (FLAG_debug_code) {
899  __ AssertNotSmi(kInterpreterBytecodeArrayRegister);
900  __ CmpObjectType(kInterpreterBytecodeArrayRegister, BYTECODE_ARRAY_TYPE,
901  rax);
902  __ Assert(
903  equal,
904  AbortReason::kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry);
905  }
906 
907  // Reset code age.
908  __ movb(FieldOperand(kInterpreterBytecodeArrayRegister,
909  BytecodeArray::kBytecodeAgeOffset),
910  Immediate(BytecodeArray::kNoAgeBytecodeAge));
911 
912  // Load initial bytecode offset.
913  __ movp(kInterpreterBytecodeOffsetRegister,
914  Immediate(BytecodeArray::kHeaderSize - kHeapObjectTag));
915 
916  // Push bytecode array and Smi tagged bytecode offset.
917  __ Push(kInterpreterBytecodeArrayRegister);
918  __ SmiTag(rcx, kInterpreterBytecodeOffsetRegister);
919  __ Push(rcx);
920 
921  // Allocate the local and temporary register file on the stack.
922  {
923  // Load frame size from the BytecodeArray object.
924  __ movl(rcx, FieldOperand(kInterpreterBytecodeArrayRegister,
925  BytecodeArray::kFrameSizeOffset));
926 
927  // Do a stack check to ensure we don't go over the limit.
928  Label ok;
929  __ movp(rax, rsp);
930  __ subp(rax, rcx);
931  __ CompareRoot(rax, RootIndex::kRealStackLimit);
932  __ j(above_equal, &ok, Label::kNear);
933  __ CallRuntime(Runtime::kThrowStackOverflow);
934  __ bind(&ok);
935 
936  // If ok, push undefined as the initial value for all register file entries.
937  Label loop_header;
938  Label loop_check;
939  __ LoadRoot(rax, RootIndex::kUndefinedValue);
940  __ j(always, &loop_check, Label::kNear);
941  __ bind(&loop_header);
942  // TODO(rmcilroy): Consider doing more than one push per loop iteration.
943  __ Push(rax);
944  // Continue loop if not done.
945  __ bind(&loop_check);
946  __ subp(rcx, Immediate(kPointerSize));
947  __ j(greater_equal, &loop_header, Label::kNear);
948  }
949 
950  // If the bytecode array has a valid incoming new target or generator object
951  // register, initialize it with incoming value which was passed in rdx.
952  Label no_incoming_new_target_or_generator_register;
953  __ movsxlq(
954  rax,
955  FieldOperand(kInterpreterBytecodeArrayRegister,
956  BytecodeArray::kIncomingNewTargetOrGeneratorRegisterOffset));
957  __ testl(rax, rax);
958  __ j(zero, &no_incoming_new_target_or_generator_register, Label::kNear);
959  __ movp(Operand(rbp, rax, times_pointer_size, 0), rdx);
960  __ bind(&no_incoming_new_target_or_generator_register);
961 
962  // Load accumulator with undefined.
963  __ LoadRoot(kInterpreterAccumulatorRegister, RootIndex::kUndefinedValue);
964 
965  // Load the dispatch table into a register and dispatch to the bytecode
966  // handler at the current bytecode offset.
967  Label do_dispatch;
968  __ bind(&do_dispatch);
969  __ Move(
970  kInterpreterDispatchTableRegister,
971  ExternalReference::interpreter_dispatch_table_address(masm->isolate()));
972  __ movzxbp(r11, Operand(kInterpreterBytecodeArrayRegister,
973  kInterpreterBytecodeOffsetRegister, times_1, 0));
974  __ movp(
975  kJavaScriptCallCodeStartRegister,
976  Operand(kInterpreterDispatchTableRegister, r11, times_pointer_size, 0));
977  __ call(kJavaScriptCallCodeStartRegister);
978  masm->isolate()->heap()->SetInterpreterEntryReturnPCOffset(masm->pc_offset());
979 
980  // Any returns to the entry trampoline are either due to the return bytecode
981  // or the interpreter tail calling a builtin and then a dispatch.
982 
983  // Get bytecode array and bytecode offset from the stack frame.
984  __ movp(kInterpreterBytecodeArrayRegister,
985  Operand(rbp, InterpreterFrameConstants::kBytecodeArrayFromFp));
986  __ movp(kInterpreterBytecodeOffsetRegister,
987  Operand(rbp, InterpreterFrameConstants::kBytecodeOffsetFromFp));
988  __ SmiUntag(kInterpreterBytecodeOffsetRegister,
989  kInterpreterBytecodeOffsetRegister);
990 
991  // Either return, or advance to the next bytecode and dispatch.
992  Label do_return;
993  __ movzxbp(rbx, Operand(kInterpreterBytecodeArrayRegister,
994  kInterpreterBytecodeOffsetRegister, times_1, 0));
995  AdvanceBytecodeOffsetOrReturn(masm, kInterpreterBytecodeArrayRegister,
996  kInterpreterBytecodeOffsetRegister, rbx, rcx,
997  &do_return);
998  __ jmp(&do_dispatch);
999 
1000  __ bind(&do_return);
1001  // The return value is in rax.
1002  LeaveInterpreterFrame(masm, rbx, rcx);
1003  __ ret(0);
1004 }
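
// Illustrative sketch (not part of the original builtins-x64.cc): the
// dispatch step of the trampoline reduced to its core, a byte-indexed table
// of handler entry points.

#include <cstdint>

using BytecodeHandlerModel = void (*)();

void DispatchModel(const uint8_t* bytecode_array, int offset,
                   BytecodeHandlerModel const* dispatch_table) {
  // movzxbp r11, [array + offset]; call [table + r11 * kPointerSize]
  dispatch_table[bytecode_array[offset]]();
}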
1005 
1006 static void Generate_InterpreterPushArgs(MacroAssembler* masm,
1007  Register num_args,
1008  Register start_address,
1009  Register scratch) {
1010  // Find the address of the last argument.
1011  __ Move(scratch, num_args);
1012  __ shlp(scratch, Immediate(kPointerSizeLog2));
1013  __ negp(scratch);
1014  __ addp(scratch, start_address);
1015 
1016  // Push the arguments.
1017  Label loop_header, loop_check;
1018  __ j(always, &loop_check, Label::kNear);
1019  __ bind(&loop_header);
1020  __ Push(Operand(start_address, 0));
1021  __ subp(start_address, Immediate(kPointerSize));
1022  __ bind(&loop_check);
1023  __ cmpp(start_address, scratch);
1024  __ j(greater, &loop_header, Label::kNear);
1025 }
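
// Illustrative sketch (not part of the original builtins-x64.cc): the push
// loop above. start points at the first argument and later arguments sit at
// lower addresses, so the walk moves downward until it passes the end.

#include <cstdint>
#include <vector>

void InterpreterPushArgsModel(std::vector<uint64_t>& frame,
                              const uint64_t* start, int64_t num_args) {
  const uint64_t* end = start - num_args;  // shlp/negp/addp above
  for (const uint64_t* p = start; p > end; --p) {  // j(greater, loop_header)
    frame.push_back(*p);
  }
}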
1026 
1027 // static
1028 void Builtins::Generate_InterpreterPushArgsThenCallImpl(
1029  MacroAssembler* masm, ConvertReceiverMode receiver_mode,
1030  InterpreterPushArgsMode mode) {
1031  DCHECK(mode != InterpreterPushArgsMode::kArrayFunction);
1032  // ----------- S t a t e -------------
1033  // -- rax : the number of arguments (not including the receiver)
1034  // -- rbx : the address of the first argument to be pushed. Subsequent
1035  // arguments should be consecutive above this, in the same order as
1036  // they are to be pushed onto the stack.
1037  // -- rdi : the target to call (can be any Object).
1038  // -----------------------------------
1039  Label stack_overflow;
1040 
1041  // Number of values to be pushed.
1042  __ leal(rcx, Operand(rax, 1)); // Add one for receiver.
1043 
1044  // Add a stack check before pushing arguments.
1045  Generate_StackOverflowCheck(masm, rcx, rdx, &stack_overflow);
1046 
1047  // Pop return address to allow tail-call after pushing arguments.
1048  __ PopReturnAddressTo(kScratchRegister);
1049 
1050  // Push "undefined" as the receiver arg if we need to.
1051  if (receiver_mode == ConvertReceiverMode::kNullOrUndefined) {
1052  __ PushRoot(RootIndex::kUndefinedValue);
1053  __ decl(rcx); // Subtract one for receiver.
1054  }
1055 
1056  // rbx and rdx will be modified.
1057  Generate_InterpreterPushArgs(masm, rcx, rbx, rdx);
1058 
1059  if (mode == InterpreterPushArgsMode::kWithFinalSpread) {
1060  __ Pop(rbx); // Pass the spread in a register
1061  __ decl(rax); // Subtract one for spread
1062  }
1063 
1064  // Call the target.
1065  __ PushReturnAddressFrom(kScratchRegister); // Re-push return address.
1066 
1067  if (mode == InterpreterPushArgsMode::kWithFinalSpread) {
1068  __ Jump(BUILTIN_CODE(masm->isolate(), CallWithSpread),
1069  RelocInfo::CODE_TARGET);
1070  } else {
1071  __ Jump(masm->isolate()->builtins()->Call(receiver_mode),
1072  RelocInfo::CODE_TARGET);
1073  }
1074 
1075  // Throw stack overflow exception.
1076  __ bind(&stack_overflow);
1077  {
1078  __ TailCallRuntime(Runtime::kThrowStackOverflow);
1079  // This should be unreachable.
1080  __ int3();
1081  }
1082 }
1083 
1084 // static
1085 void Builtins::Generate_InterpreterPushArgsThenConstructImpl(
1086  MacroAssembler* masm, InterpreterPushArgsMode mode) {
1087  // ----------- S t a t e -------------
1088  // -- rax : the number of arguments (not including the receiver)
1089  // -- rdx : the new target (either the same as the constructor or
1090  // the JSFunction on which new was invoked initially)
1091  // -- rdi : the constructor to call (can be any Object)
1092  // -- rbx : the allocation site feedback if available, undefined otherwise
1093  // -- rcx : the address of the first argument to be pushed. Subsequent
1094  // arguments should be consecutive above this, in the same order as
1095  // they are to be pushed onto the stack.
1096  // -----------------------------------
1097  Label stack_overflow;
1098 
1099  // Add a stack check before pushing arguments.
1100  Generate_StackOverflowCheck(masm, rax, r8, &stack_overflow);
1101 
1102  // Pop return address to allow tail-call after pushing arguments.
1103  __ PopReturnAddressTo(kScratchRegister);
1104 
1105  // Push slot for the receiver to be constructed.
1106  __ Push(Immediate(0));
1107 
1108  // rcx and r8 will be modified.
1109  Generate_InterpreterPushArgs(masm, rax, rcx, r8);
1110 
1111  if (mode == InterpreterPushArgsMode::kWithFinalSpread) {
1112  __ Pop(rbx); // Pass the spread in a register
1113  __ decl(rax); // Subtract one for spread
1114 
1115  // Push return address in preparation for the tail-call.
1116  __ PushReturnAddressFrom(kScratchRegister);
1117  } else {
1118  __ PushReturnAddressFrom(kScratchRegister);
1119  __ AssertUndefinedOrAllocationSite(rbx);
1120  }
1121 
1122  if (mode == InterpreterPushArgsMode::kArrayFunction) {
1123  // Tail call to the array construct stub (still in the caller
1124  // context at this point).
1125  __ AssertFunction(rdi);
1126  // Jump to the constructor function (rax, rbx, rdx passed on).
1127  Handle<Code> code = BUILTIN_CODE(masm->isolate(), ArrayConstructorImpl);
1128  __ Jump(code, RelocInfo::CODE_TARGET);
1129  } else if (mode == InterpreterPushArgsMode::kWithFinalSpread) {
1130  // Call the constructor (rax, rdx, rdi passed on).
1131  __ Jump(BUILTIN_CODE(masm->isolate(), ConstructWithSpread),
1132  RelocInfo::CODE_TARGET);
1133  } else {
1134  DCHECK_EQ(InterpreterPushArgsMode::kOther, mode);
1135  // Call the constructor (rax, rdx, rdi passed on).
1136  __ Jump(BUILTIN_CODE(masm->isolate(), Construct), RelocInfo::CODE_TARGET);
1137  }
1138 
1139  // Throw stack overflow exception.
1140  __ bind(&stack_overflow);
1141  {
1142  __ TailCallRuntime(Runtime::kThrowStackOverflow);
1143  // This should be unreachable.
1144  __ int3();
1145  }
1146 }
1147 
1148 static void Generate_InterpreterEnterBytecode(MacroAssembler* masm) {
1149  // Set the return address to the correct point in the interpreter entry
1150  // trampoline.
1151  Label builtin_trampoline, trampoline_loaded;
1152  Smi interpreter_entry_return_pc_offset(
1153  masm->isolate()->heap()->interpreter_entry_return_pc_offset());
1154  DCHECK_NE(interpreter_entry_return_pc_offset, Smi::kZero);
1155 
1156  // If the SFI function_data is an InterpreterData, the function will have a
1157  // custom copy of the interpreter entry trampoline for profiling. If so,
1158  // get the custom trampoline; otherwise grab the entry address of the global
1159  // trampoline.
1160  __ movp(rbx, Operand(rbp, StandardFrameConstants::kFunctionOffset));
1161  __ movp(rbx, FieldOperand(rbx, JSFunction::kSharedFunctionInfoOffset));
1162  __ movp(rbx, FieldOperand(rbx, SharedFunctionInfo::kFunctionDataOffset));
1163  __ CmpObjectType(rbx, INTERPRETER_DATA_TYPE, kScratchRegister);
1164  __ j(not_equal, &builtin_trampoline, Label::kNear);
1165 
1166  __ movp(rbx,
1167  FieldOperand(rbx, InterpreterData::kInterpreterTrampolineOffset));
1168  __ addp(rbx, Immediate(Code::kHeaderSize - kHeapObjectTag));
1169  __ jmp(&trampoline_loaded, Label::kNear);
1170 
1171  __ bind(&builtin_trampoline);
1172  __ movp(rbx,
1173  __ ExternalReferenceAsOperand(
1174  ExternalReference::
1175  address_of_interpreter_entry_trampoline_instruction_start(
1176  masm->isolate()),
1177  kScratchRegister));
1178 
1179  __ bind(&trampoline_loaded);
1180  __ addp(rbx, Immediate(interpreter_entry_return_pc_offset->value()));
1181  __ Push(rbx);
1182 
1183  // Initialize dispatch table register.
1184  __ Move(
1185  kInterpreterDispatchTableRegister,
1186  ExternalReference::interpreter_dispatch_table_address(masm->isolate()));
1187 
1188  // Get the bytecode array pointer from the frame.
1189  __ movp(kInterpreterBytecodeArrayRegister,
1190  Operand(rbp, InterpreterFrameConstants::kBytecodeArrayFromFp));
1191 
1192  if (FLAG_debug_code) {
1193  // Check function data field is actually a BytecodeArray object.
1194  __ AssertNotSmi(kInterpreterBytecodeArrayRegister);
1195  __ CmpObjectType(kInterpreterBytecodeArrayRegister, BYTECODE_ARRAY_TYPE,
1196  rbx);
1197  __ Assert(
1198  equal,
1199  AbortReason::kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry);
1200  }
1201 
1202  // Get the target bytecode offset from the frame.
1203  __ movp(kInterpreterBytecodeOffsetRegister,
1204  Operand(rbp, InterpreterFrameConstants::kBytecodeOffsetFromFp));
1205  __ SmiUntag(kInterpreterBytecodeOffsetRegister,
1206  kInterpreterBytecodeOffsetRegister);
1207 
1208  // Dispatch to the target bytecode.
1209  __ movzxbp(r11, Operand(kInterpreterBytecodeArrayRegister,
1210  kInterpreterBytecodeOffsetRegister, times_1, 0));
1211  __ movp(
1212  kJavaScriptCallCodeStartRegister,
1213  Operand(kInterpreterDispatchTableRegister, r11, times_pointer_size, 0));
1214  __ jmp(kJavaScriptCallCodeStartRegister);
1215 }
1216 
1217 void Builtins::Generate_InterpreterEnterBytecodeAdvance(MacroAssembler* masm) {
1218  // Get bytecode array and bytecode offset from the stack frame.
1219  __ movp(kInterpreterBytecodeArrayRegister,
1220  Operand(rbp, InterpreterFrameConstants::kBytecodeArrayFromFp));
1221  __ movp(kInterpreterBytecodeOffsetRegister,
1222  Operand(rbp, InterpreterFrameConstants::kBytecodeOffsetFromFp));
1223  __ SmiUntag(kInterpreterBytecodeOffsetRegister,
1224  kInterpreterBytecodeOffsetRegister);
1225 
1226  // Load the current bytecode.
1227  __ movzxbp(rbx, Operand(kInterpreterBytecodeArrayRegister,
1228  kInterpreterBytecodeOffsetRegister, times_1, 0));
1229 
1230  // Advance to the next bytecode.
1231  Label if_return;
1232  AdvanceBytecodeOffsetOrReturn(masm, kInterpreterBytecodeArrayRegister,
1233  kInterpreterBytecodeOffsetRegister, rbx, rcx,
1234  &if_return);
1235 
1236  // Convert new bytecode offset to a Smi and save in the stackframe.
1237  __ SmiTag(rbx, kInterpreterBytecodeOffsetRegister);
1238  __ movp(Operand(rbp, InterpreterFrameConstants::kBytecodeOffsetFromFp), rbx);
1239 
1240  Generate_InterpreterEnterBytecode(masm);
1241 
1242  // We should never take the if_return path.
1243  __ bind(&if_return);
1244  __ Abort(AbortReason::kInvalidBytecodeAdvance);
1245 }
1246 
1247 void Builtins::Generate_InterpreterEnterBytecodeDispatch(MacroAssembler* masm) {
1248  Generate_InterpreterEnterBytecode(masm);
1249 }
1250 
1251 void Builtins::Generate_InstantiateAsmJs(MacroAssembler* masm) {
1252  // ----------- S t a t e -------------
1253  // -- rax : argument count (preserved for callee)
1254  // -- rdx : new target (preserved for callee)
1255  // -- rdi : target function (preserved for callee)
1256  // -----------------------------------
1257  Label failed;
1258  {
1259  FrameScope scope(masm, StackFrame::INTERNAL);
1260  // Preserve argument count for later compare.
1261  __ movp(rcx, rax);
1262  // Push the number of arguments to the callee.
1263  __ SmiTag(rax, rax);
1264  __ Push(rax);
1265  // Push a copy of the target function and the new target.
1266  __ Push(rdi);
1267  __ Push(rdx);
1268 
1269  // The function.
1270  __ Push(rdi);
1271  // Copy arguments from caller (stdlib, foreign, heap).
1272  Label args_done;
1273  for (int j = 0; j < 4; ++j) {
1274  Label over;
1275  if (j < 3) {
1276  __ cmpp(rcx, Immediate(j));
1277  __ j(not_equal, &over, Label::kNear);
1278  }
1279  for (int i = j - 1; i >= 0; --i) {
1280  __ Push(Operand(
1281  rbp, StandardFrameConstants::kCallerSPOffset + i * kPointerSize));
1282  }
1283  for (int i = 0; i < 3 - j; ++i) {
1284  __ PushRoot(RootIndex::kUndefinedValue);
1285  }
1286  if (j < 3) {
1287  __ jmp(&args_done, Label::kNear);
1288  __ bind(&over);
1289  }
1290  }
1291  __ bind(&args_done);
1292 
1293  // Call runtime, on success unwind frame, and parent frame.
1294  __ CallRuntime(Runtime::kInstantiateAsmJs, 4);
1295  // A smi 0 is returned on failure, an object on success.
1296  __ JumpIfSmi(rax, &failed, Label::kNear);
1297 
1298  __ Drop(2);
1299  __ Pop(rcx);
1300  __ SmiUntag(rcx, rcx);
1301  scope.GenerateLeaveFrame();
1302 
1303  __ PopReturnAddressTo(rbx);
1304  __ incp(rcx);
1305  __ leap(rsp, Operand(rsp, rcx, times_pointer_size, 0));
1306  __ PushReturnAddressFrom(rbx);
1307  __ ret(0);
1308 
1309  __ bind(&failed);
1310  // Restore target function and new target.
1311  __ Pop(rdx);
1312  __ Pop(rdi);
1313  __ Pop(rax);
1314  __ SmiUntag(rax, rax);
1315  }
1316  // On failure, tail call back to regular JS by re-calling the function,
1317  // which has been reset to the CompileLazy builtin.
1318  __ movp(rcx, FieldOperand(rdi, JSFunction::kCodeOffset));
1319  __ addp(rcx, Immediate(Code::kHeaderSize - kHeapObjectTag));
1320  __ jmp(rcx);
1321 }
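
// Illustrative sketch (not part of the original builtins-x64.cc): the
// unrolled loops above pass exactly three values (stdlib, foreign, heap) to
// the runtime, padding with undefined when the caller supplied fewer.

#include <algorithm>
#include <cstdint>
#include <vector>

void PushAsmJsArgsModel(std::vector<uint64_t>& frame,
                        const uint64_t* caller_args, int argc,
                        uint64_t undefined_value) {
  int given = std::min(argc, 3);
  for (int i = given - 1; i >= 0; --i) frame.push_back(caller_args[i]);
  for (int i = given; i < 3; ++i) frame.push_back(undefined_value);
}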
1322 
1323 namespace {
1324 void Generate_ContinueToBuiltinHelper(MacroAssembler* masm,
1325  bool java_script_builtin,
1326  bool with_result) {
1327  const RegisterConfiguration* config(RegisterConfiguration::Default());
1328  int allocatable_register_count = config->num_allocatable_general_registers();
1329  if (with_result) {
1330  // Overwrite the hole inserted by the deoptimizer with the return value from
1331  // the LAZY deopt point.
1332  __ movq(Operand(rsp,
1333  config->num_allocatable_general_registers() * kPointerSize +
1334  BuiltinContinuationFrameConstants::kFixedFrameSize),
1335  rax);
1336  }
1337  for (int i = allocatable_register_count - 1; i >= 0; --i) {
1338  int code = config->GetAllocatableGeneralCode(i);
1339  __ popq(Register::from_code(code));
1340  if (java_script_builtin && code == kJavaScriptCallArgCountRegister.code()) {
1341  __ SmiUntag(Register::from_code(code), Register::from_code(code));
1342  }
1343  }
1344  __ movq(
1345  rbp,
1346  Operand(rsp, BuiltinContinuationFrameConstants::kFixedFrameSizeFromFp));
1347  const int offsetToPC =
1348  BuiltinContinuationFrameConstants::kFixedFrameSizeFromFp - kPointerSize;
1349  __ popq(Operand(rsp, offsetToPC));
1350  __ Drop(offsetToPC / kPointerSize);
1351  __ addq(Operand(rsp, 0), Immediate(Code::kHeaderSize - kHeapObjectTag));
1352  __ Ret();
1353 }
1354 } // namespace
1355 
1356 void Builtins::Generate_ContinueToCodeStubBuiltin(MacroAssembler* masm) {
1357  Generate_ContinueToBuiltinHelper(masm, false, false);
1358 }
1359 
1360 void Builtins::Generate_ContinueToCodeStubBuiltinWithResult(
1361  MacroAssembler* masm) {
1362  Generate_ContinueToBuiltinHelper(masm, false, true);
1363 }
1364 
1365 void Builtins::Generate_ContinueToJavaScriptBuiltin(MacroAssembler* masm) {
1366  Generate_ContinueToBuiltinHelper(masm, true, false);
1367 }
1368 
1369 void Builtins::Generate_ContinueToJavaScriptBuiltinWithResult(
1370  MacroAssembler* masm) {
1371  Generate_ContinueToBuiltinHelper(masm, true, true);
1372 }
1373 
1374 void Builtins::Generate_NotifyDeoptimized(MacroAssembler* masm) {
1375  // Enter an internal frame.
1376  {
1377  FrameScope scope(masm, StackFrame::INTERNAL);
1378  __ CallRuntime(Runtime::kNotifyDeoptimized);
1379  // Tear down internal frame.
1380  }
1381 
1382  DCHECK_EQ(kInterpreterAccumulatorRegister.code(), rax.code());
1383  __ movp(rax, Operand(rsp, kPCOnStackSize));
1384  __ ret(1 * kPointerSize); // Remove rax.
1385 }
1386 
1387 // static
1388 void Builtins::Generate_FunctionPrototypeApply(MacroAssembler* masm) {
1389  // ----------- S t a t e -------------
1390  // -- rax : argc
1391  // -- rsp[0] : return address
1392  // -- rsp[8] : argArray
1393  // -- rsp[16] : thisArg
1394  // -- rsp[24] : receiver
1395  // -----------------------------------
1396 
1397  // 1. Load receiver into rdi, argArray into rbx (if present), remove all
1398  // arguments from the stack (including the receiver), and push thisArg (if
1399  // present) instead.
1400  {
1401  Label no_arg_array, no_this_arg;
1402  StackArgumentsAccessor args(rsp, rax);
1403  __ LoadRoot(rdx, RootIndex::kUndefinedValue);
1404  __ movp(rbx, rdx);
1405  __ movp(rdi, args.GetReceiverOperand());
1406  __ testp(rax, rax);
1407  __ j(zero, &no_this_arg, Label::kNear);
1408  {
1409  __ movp(rdx, args.GetArgumentOperand(1));
1410  __ cmpp(rax, Immediate(1));
1411  __ j(equal, &no_arg_array, Label::kNear);
1412  __ movp(rbx, args.GetArgumentOperand(2));
1413  __ bind(&no_arg_array);
1414  }
1415  __ bind(&no_this_arg);
1416  __ PopReturnAddressTo(rcx);
1417  __ leap(rsp, Operand(rsp, rax, times_pointer_size, kPointerSize));
1418  __ Push(rdx);
1419  __ PushReturnAddressFrom(rcx);
1420  }
1421 
1422  // ----------- S t a t e -------------
1423  // -- rbx : argArray
1424  // -- rdi : receiver
1425  // -- rsp[0] : return address
1426  // -- rsp[8] : thisArg
1427  // -----------------------------------
1428 
1429  // 2. We don't need to check explicitly for callable receiver here,
1430  // since that's the first thing the Call/CallWithArrayLike builtins
1431  // will do.
1432 
1433  // 3. Tail call with no arguments if argArray is null or undefined.
1434  Label no_arguments;
1435  __ JumpIfRoot(rbx, RootIndex::kNullValue, &no_arguments, Label::kNear);
1436  __ JumpIfRoot(rbx, RootIndex::kUndefinedValue, &no_arguments, Label::kNear);
1437 
1438  // 4a. Apply the receiver to the given argArray.
1439  __ Jump(BUILTIN_CODE(masm->isolate(), CallWithArrayLike),
1440  RelocInfo::CODE_TARGET);
1441 
1442  // 4b. The argArray is either null or undefined, so we tail call without any
1443  // arguments to the receiver. Since we did not create a frame for
1444  // Function.prototype.apply() yet, we use a normal Call builtin here.
1445  __ bind(&no_arguments);
1446  {
1447  __ Set(rax, 0);
1448  __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
1449  }
1450 }
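
// Illustrative sketch (not part of the original builtins-x64.cc): the
// defaulting performed in step 1 above. args holds the JS arguments in
// first-to-last order; absent values default to undefined. ObjectStub is an
// opaque stand-in.

struct ObjectStub;

struct ApplyArgsModel { ObjectStub* this_arg; ObjectStub* arg_array; };

ApplyArgsModel GetApplyArguments(ObjectStub* const* args, int argc,
                                 ObjectStub* undefined_value) {
  ApplyArgsModel out;
  out.this_arg = argc >= 1 ? args[0] : undefined_value;   // thisArg
  out.arg_array = argc >= 2 ? args[1] : undefined_value;  // argArray
  return out;
}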
1451 
1452 // static
1453 void Builtins::Generate_FunctionPrototypeCall(MacroAssembler* masm) {
1454  // Stack Layout:
1455  // rsp[0] : Return address
1456  // rsp[8] : Argument n
1457  // rsp[16] : Argument n-1
1458  // ...
1459  // rsp[8 * n] : Argument 1
1460  // rsp[8 * (n + 1)] : Receiver (callable to call)
1461  //
1462  // rax contains the number of arguments, n, not counting the receiver.
1463  //
1464  // 1. Make sure we have at least one argument.
1465  {
1466  Label done;
1467  __ testp(rax, rax);
1468  __ j(not_zero, &done, Label::kNear);
1469  __ PopReturnAddressTo(rbx);
1470  __ PushRoot(RootIndex::kUndefinedValue);
1471  __ PushReturnAddressFrom(rbx);
1472  __ incp(rax);
1473  __ bind(&done);
1474  }
1475 
1476  // 2. Get the callable to call (passed as receiver) from the stack.
1477  {
1478  StackArgumentsAccessor args(rsp, rax);
1479  __ movp(rdi, args.GetReceiverOperand());
1480  }
1481 
1482  // 3. Shift arguments and return address one slot down on the stack
1483  // (overwriting the original receiver). Adjust argument count to make
1484  // the original first argument the new receiver.
1485  {
1486  Label loop;
1487  __ movp(rcx, rax);
1488  StackArgumentsAccessor args(rsp, rcx);
1489  __ bind(&loop);
1490  __ movp(rbx, args.GetArgumentOperand(1));
1491  __ movp(args.GetArgumentOperand(0), rbx);
1492  __ decp(rcx);
1493  __ j(not_zero, &loop); // While non-zero.
1494  __ DropUnderReturnAddress(1, rbx); // Drop one slot under return address.
1495  __ decp(rax); // One fewer argument (first argument is new receiver).
1496  }
1497 
1498  // 4. Call the callable.
1499  // Since we did not create a frame for Function.prototype.call() yet,
1500  // we use a normal Call builtin here.
1501  __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
1502 }
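// JS-level sketch (standard Function.prototype.call semantics): for
//   f.call(o, x, y);
// the incoming receiver slot holds f itself and the stack arguments are
// (o, x, y) with rax == 3. Step 2 loads f into rdi; the loop in step 3 then
// slides (o, x, y) down one slot, so o becomes the receiver and (x, y) remain
// as arguments with rax == 2.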
1503 
1504 void Builtins::Generate_ReflectApply(MacroAssembler* masm) {
1505  // ----------- S t a t e -------------
1506  // -- rax : argc
1507  // -- rsp[0] : return address
1508  // -- rsp[8] : argumentsList
1509  // -- rsp[16] : thisArgument
1510  // -- rsp[24] : target
1511  // -- rsp[32] : receiver
1512  // -----------------------------------
1513 
1514  // 1. Load target into rdi (if present), argumentsList into rbx (if present),
1515  // remove all arguments from the stack (including the receiver), and push
1516  // thisArgument (if present) instead.
1517  {
1518  Label done;
1519  StackArgumentsAccessor args(rsp, rax);
1520  __ LoadRoot(rdi, RootIndex::kUndefinedValue);
1521  __ movp(rdx, rdi);
1522  __ movp(rbx, rdi);
1523  __ cmpp(rax, Immediate(1));
1524  __ j(below, &done, Label::kNear);
1525  __ movp(rdi, args.GetArgumentOperand(1)); // target
1526  __ j(equal, &done, Label::kNear);
1527  __ movp(rdx, args.GetArgumentOperand(2)); // thisArgument
1528  __ cmpp(rax, Immediate(3));
1529  __ j(below, &done, Label::kNear);
1530  __ movp(rbx, args.GetArgumentOperand(3)); // argumentsList
1531  __ bind(&done);
1532  __ PopReturnAddressTo(rcx);
1533  __ leap(rsp, Operand(rsp, rax, times_pointer_size, kPointerSize));
1534  __ Push(rdx);
1535  __ PushReturnAddressFrom(rcx);
1536  }
1537 
1538  // ----------- S t a t e -------------
1539  // -- rbx : argumentsList
1540  // -- rdi : target
1541  // -- rsp[0] : return address
1542  // -- rsp[8] : thisArgument
1543  // -----------------------------------
1544 
1545  // 2. We don't need to check explicitly for callable target here,
1546  // since that's the first thing the Call/CallWithArrayLike builtins
1547  // will do.
1548 
1549  // 3. Apply the target to the given argumentsList.
1550  __ Jump(BUILTIN_CODE(masm->isolate(), CallWithArrayLike),
1551  RelocInfo::CODE_TARGET);
1552 }
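// JS-level sketch (standard Reflect.apply semantics) of the operand loads
// above:
//   Reflect.apply(Math.max, undefined, [1, 2, 3]);  // evaluates to 3
// reaches CallWithArrayLike with rdi == Math.max, rbx == the [1, 2, 3] list,
// and the thisArgument (undefined) pushed as the receiver.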
1553 
1554 void Builtins::Generate_ReflectConstruct(MacroAssembler* masm) {
1555  // ----------- S t a t e -------------
1556  // -- rax : argc
1557  // -- rsp[0] : return address
1558  // -- rsp[8] : new.target (optional)
1559  // -- rsp[16] : argumentsList
1560  // -- rsp[24] : target
1561  // -- rsp[32] : receiver
1562  // -----------------------------------
1563 
1564  // 1. Load target into rdi (if present), argumentsList into rbx (if present),
1565  // new.target into rdx (if present, otherwise use target), remove all
1566  // arguments from the stack (including the receiver), and push undefined
1567  // as the receiver instead.
1568  {
1569  Label done;
1570  StackArgumentsAccessor args(rsp, rax);
1571  __ LoadRoot(rdi, RootIndex::kUndefinedValue);
1572  __ movp(rdx, rdi);
1573  __ movp(rbx, rdi);
1574  __ cmpp(rax, Immediate(1));
1575  __ j(below, &done, Label::kNear);
1576  __ movp(rdi, args.GetArgumentOperand(1)); // target
1577  __ movp(rdx, rdi); // new.target defaults to target
1578  __ j(equal, &done, Label::kNear);
1579  __ movp(rbx, args.GetArgumentOperand(2)); // argumentsList
1580  __ cmpp(rax, Immediate(3));
1581  __ j(below, &done, Label::kNear);
1582  __ movp(rdx, args.GetArgumentOperand(3)); // new.target
1583  __ bind(&done);
1584  __ PopReturnAddressTo(rcx);
1585  __ leap(rsp, Operand(rsp, rax, times_pointer_size, kPointerSize));
1586  __ PushRoot(RootIndex::kUndefinedValue);
1587  __ PushReturnAddressFrom(rcx);
1588  }
1589 
1590  // ----------- S t a t e -------------
1591  // -- rbx : argumentsList
1592  // -- rdx : new.target
1593  // -- rdi : target
1594  // -- rsp[0] : return address
1595  // -- rsp[8] : receiver (undefined)
1596  // -----------------------------------
1597 
1598  // 2. We don't need to check explicitly for constructor target here,
1599  // since that's the first thing the Construct/ConstructWithArrayLike
1600  // builtins will do.
1601 
1602  // 3. We don't need to check explicitly for constructor new.target here,
1603  // since that's the second thing the Construct/ConstructWithArrayLike
1604  // builtins will do.
1605 
1606  // 4. Construct the target with the given new.target and argumentsList.
1607  __ Jump(BUILTIN_CODE(masm->isolate(), ConstructWithArrayLike),
1608  RelocInfo::CODE_TARGET);
1609 }
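// JS-level sketch (standard Reflect.construct semantics), mirroring the rdx
// handling above, where new.target defaults to target:
//   Reflect.construct(Date, [2018, 0]);          // new.target == Date
//   Reflect.construct(Date, [2018, 0], MyDate);  // new.target == MyDate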
1610 
1611 void Builtins::Generate_InternalArrayConstructor(MacroAssembler* masm) {
1612  // ----------- S t a t e -------------
1613  // -- rax : argc
1614  // -- rsp[0] : return address
1615  // -- rsp[8] : last argument
1616  // -----------------------------------
1617  Label generic_array_code;
1618 
1619  if (FLAG_debug_code) {
1620  // The initial map for the builtin InternalArray functions should be a map.
1621  __ movp(rbx, FieldOperand(rdi, JSFunction::kPrototypeOrInitialMapOffset));
1622  // The Smi check below catches both a nullptr and a Smi.
1623  STATIC_ASSERT(kSmiTag == 0);
1624  Condition not_smi = NegateCondition(masm->CheckSmi(rbx));
1625  __ Check(not_smi,
1626  AbortReason::kUnexpectedInitialMapForInternalArrayFunction);
1627  __ CmpObjectType(rbx, MAP_TYPE, rcx);
1628  __ Check(equal, AbortReason::kUnexpectedInitialMapForInternalArrayFunction);
1629  }
1630 
1631  // Run the native code for the InternalArray function called as a normal
1632  // function.
1633  __ Jump(BUILTIN_CODE(masm->isolate(), InternalArrayConstructorImpl),
1634  RelocInfo::CODE_TARGET);
1635 }
1636 
1637 static void EnterArgumentsAdaptorFrame(MacroAssembler* masm) {
1638  __ pushq(rbp);
1639  __ movp(rbp, rsp);
1640 
1641  // Store the arguments adaptor context sentinel.
1642  __ Push(Immediate(StackFrame::TypeToMarker(StackFrame::ARGUMENTS_ADAPTOR)));
1643 
1644  // Push the function on the stack.
1645  __ Push(rdi);
1646 
1647  // Preserve the number of arguments on the stack. Must preserve rax,
1648  // rbx and rcx because these registers are used when copying the
1649  // arguments and the receiver.
1650  __ SmiTag(r8, rax);
1651  __ Push(r8);
1652 
1653  __ Push(Immediate(0)); // Padding.
1654 }
1655 
1656 static void LeaveArgumentsAdaptorFrame(MacroAssembler* masm) {
1657  // Retrieve the number of arguments from the stack. Number is a Smi.
1658  __ movp(rbx, Operand(rbp, ArgumentsAdaptorFrameConstants::kLengthOffset));
1659 
1660  // Leave the frame.
1661  __ movp(rsp, rbp);
1662  __ popq(rbp);
1663 
1664  // Remove caller arguments from the stack.
1665  __ PopReturnAddressTo(rcx);
1666  SmiIndex index = masm->SmiToIndex(rbx, rbx, kPointerSizeLog2);
1667  __ leap(rsp, Operand(rsp, index.reg, index.scale, 1 * kPointerSize));
1668  __ PushReturnAddressFrom(rcx);
1669 }
1670 
1671 void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
1672  // ----------- S t a t e -------------
1673  // -- rax : actual number of arguments
1674  // -- rbx : expected number of arguments
1675  // -- rdx : new target (passed through to callee)
1676  // -- rdi : function (passed through to callee)
1677  // -----------------------------------
1678 
1679  Label invoke, dont_adapt_arguments, stack_overflow, enough, too_few;
1680  __ cmpp(rbx, Immediate(SharedFunctionInfo::kDontAdaptArgumentsSentinel));
1681  __ j(equal, &dont_adapt_arguments);
1682  __ cmpp(rax, rbx);
1683  __ j(less, &too_few);
1684 
1685  { // Enough parameters: Actual >= expected.
1686  __ bind(&enough);
1687  EnterArgumentsAdaptorFrame(masm);
1688  // The registers rcx and r8 will be modified. The register rbx is only read.
1689  Generate_StackOverflowCheck(masm, rbx, rcx, &stack_overflow);
1690 
1691  // Copy receiver and all expected arguments.
1692  const int offset = StandardFrameConstants::kCallerSPOffset;
1693  __ leap(rax, Operand(rbp, rax, times_pointer_size, offset));
1694  __ Set(r8, -1); // account for receiver
1695 
1696  Label copy;
1697  __ bind(&copy);
1698  __ incp(r8);
1699  __ Push(Operand(rax, 0));
1700  __ subp(rax, Immediate(kPointerSize));
1701  __ cmpp(r8, rbx);
1702  __ j(less, &copy);
1703  __ jmp(&invoke);
1704  }
1705 
1706  { // Too few parameters: Actual < expected.
1707  __ bind(&too_few);
1708 
1709  EnterArgumentsAdaptorFrame(masm);
1710  // The registers rcx and r8 will be modified. The register rbx is only read.
1711  Generate_StackOverflowCheck(masm, rbx, rcx, &stack_overflow);
1712 
1713  // Copy receiver and all actual arguments.
1714  const int offset = StandardFrameConstants::kCallerSPOffset;
1715  __ leap(rdi, Operand(rbp, rax, times_pointer_size, offset));
1716  __ Set(r8, -1); // account for receiver
1717 
1718  Label copy;
1719  __ bind(&copy);
1720  __ incp(r8);
1721  __ Push(Operand(rdi, 0));
1722  __ subp(rdi, Immediate(kPointerSize));
1723  __ cmpp(r8, rax);
1724  __ j(less, &copy);
1725 
1726  // Fill remaining expected arguments with undefined values.
1727  Label fill;
1728  __ LoadRoot(kScratchRegister, RootIndex::kUndefinedValue);
1729  __ bind(&fill);
1730  __ incp(r8);
1731  __ Push(kScratchRegister);
1732  __ cmpp(r8, rbx);
1733  __ j(less, &fill);
1734 
1735  // Restore function pointer.
1736  __ movp(rdi, Operand(rbp, ArgumentsAdaptorFrameConstants::kFunctionOffset));
1737  }
1738 
1739  // Call the entry point.
1740  __ bind(&invoke);
1741  __ movp(rax, rbx);
1742  // rax : expected number of arguments
1743  // rdx : new target (passed through to callee)
1744  // rdi : function (passed through to callee)
1745  static_assert(kJavaScriptCallCodeStartRegister == rcx, "ABI mismatch");
1746  __ movp(rcx, FieldOperand(rdi, JSFunction::kCodeOffset));
1747  __ addp(rcx, Immediate(Code::kHeaderSize - kHeapObjectTag));
1748  __ call(rcx);
1749 
1750  // Store offset of return address for deoptimizer.
1751  masm->isolate()->heap()->SetArgumentsAdaptorDeoptPCOffset(masm->pc_offset());
1752 
1753  // Leave frame and return.
1754  LeaveArgumentsAdaptorFrame(masm);
1755  __ ret(0);
1756 
1757  // -------------------------------------------
1758  // Don't adapt arguments.
1759  // -------------------------------------------
1760  __ bind(&dont_adapt_arguments);
1761  static_assert(kJavaScriptCallCodeStartRegister == rcx, "ABI mismatch");
1762  __ movp(rcx, FieldOperand(rdi, JSFunction::kCodeOffset));
1763  __ addp(rcx, Immediate(Code::kHeaderSize - kHeapObjectTag));
1764  __ jmp(rcx);
1765 
1766  __ bind(&stack_overflow);
1767  {
1768  FrameScope frame(masm, StackFrame::MANUAL);
1769  __ CallRuntime(Runtime::kThrowStackOverflow);
1770  __ int3();
1771  }
1772 }
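// Worked example for the trampoline above (an ordinary JS arity mismatch):
// calling function f(a, b, c) {} as f(1) arrives with actual rax == 1 and
// expected rbx == 3, so the "too few" path copies the receiver and the lone
// argument, then pushes undefined twice; the callee always sees exactly the
// expected number of arguments.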
1773 
1774 // static
1775 void Builtins::Generate_CallOrConstructVarargs(MacroAssembler* masm,
1776  Handle<Code> code) {
1777  // ----------- S t a t e -------------
1778  // -- rdi : target
1779  // -- rax : number of parameters on the stack (not including the receiver)
1780  // -- rbx : arguments list (a FixedArray)
1781  // -- rcx : len (number of elements to push from args)
1782  // -- rdx : new.target (for [[Construct]])
1783  // -- rsp[0] : return address
1784  // -----------------------------------
1785  if (masm->emit_debug_code()) {
1786  // Allow rbx to be a FixedArray, or a FixedDoubleArray if rcx == 0.
1787  Label ok, fail;
1788  __ AssertNotSmi(rbx);
1789  Register map = r9;
1790  __ movp(map, FieldOperand(rbx, HeapObject::kMapOffset));
1791  __ CmpInstanceType(map, FIXED_ARRAY_TYPE);
1792  __ j(equal, &ok);
1793  __ CmpInstanceType(map, FIXED_DOUBLE_ARRAY_TYPE);
1794  __ j(not_equal, &fail);
1795  __ cmpl(rcx, Immediate(0));
1796  __ j(equal, &ok);
1797  // Fall through.
1798  __ bind(&fail);
1799  __ Abort(AbortReason::kOperandIsNotAFixedArray);
1800 
1801  __ bind(&ok);
1802  }
1803 
1804  Label stack_overflow;
1805  Generate_StackOverflowCheck(masm, rcx, r8, &stack_overflow, Label::kNear);
1806 
1807  // Push additional arguments onto the stack.
1808  {
1809  __ PopReturnAddressTo(r8);
1810  __ Set(r9, 0);
1811  Label done, push, loop;
1812  __ bind(&loop);
1813  __ cmpl(r9, rcx);
1814  __ j(equal, &done, Label::kNear);
1815  // Turn the hole into undefined as we go.
1816  __ movp(r11,
1817  FieldOperand(rbx, r9, times_pointer_size, FixedArray::kHeaderSize));
1818  __ CompareRoot(r11, RootIndex::kTheHoleValue);
1819  __ j(not_equal, &push, Label::kNear);
1820  __ LoadRoot(r11, RootIndex::kUndefinedValue);
1821  __ bind(&push);
1822  __ Push(r11);
1823  __ incl(r9);
1824  __ jmp(&loop);
1825  __ bind(&done);
1826  __ PushReturnAddressFrom(r8);
1827  __ addq(rax, r9);
1828  }
1829 
1830  // Tail-call to the actual Call or Construct builtin.
1831  __ Jump(code, RelocInfo::CODE_TARGET);
1832 
1833  __ bind(&stack_overflow);
1834  __ TailCallRuntime(Runtime::kThrowStackOverflow);
1835 }
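// Illustration of the push loop above: a spread call such as f(...[1, , 3])
// can take this path when the array fast case applies; the loop then pushes
// 1, undefined, 3, so the hole sentinel never escapes to the callee.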
1836 
1837 // static
1838 void Builtins::Generate_CallOrConstructForwardVarargs(MacroAssembler* masm,
1839  CallOrConstructMode mode,
1840  Handle<Code> code) {
1841  // ----------- S t a t e -------------
1842  // -- rax : the number of arguments (not including the receiver)
1843  // -- rdx : the new target (for [[Construct]] calls)
1844  // -- rdi : the target to call (can be any Object)
1845  // -- rcx : start index (to support rest parameters)
1846  // -----------------------------------
1847 
1848  // Check if new.target has a [[Construct]] internal method.
1849  if (mode == CallOrConstructMode::kConstruct) {
1850  Label new_target_constructor, new_target_not_constructor;
1851  __ JumpIfSmi(rdx, &new_target_not_constructor, Label::kNear);
1852  __ movp(rbx, FieldOperand(rdx, HeapObject::kMapOffset));
1853  __ testb(FieldOperand(rbx, Map::kBitFieldOffset),
1854  Immediate(Map::IsConstructorBit::kMask));
1855  __ j(not_zero, &new_target_constructor, Label::kNear);
1856  __ bind(&new_target_not_constructor);
1857  {
1858  FrameScope scope(masm, StackFrame::MANUAL);
1859  __ EnterFrame(StackFrame::INTERNAL);
1860  __ Push(rdx);
1861  __ CallRuntime(Runtime::kThrowNotConstructor);
1862  }
1863  __ bind(&new_target_constructor);
1864  }
1865 
1866  // Check if we have an arguments adaptor frame below the function frame.
1867  Label arguments_adaptor, arguments_done;
1868  __ movp(rbx, Operand(rbp, StandardFrameConstants::kCallerFPOffset));
1869  __ cmpp(Operand(rbx, CommonFrameConstants::kContextOrFrameTypeOffset),
1870  Immediate(StackFrame::TypeToMarker(StackFrame::ARGUMENTS_ADAPTOR)));
1871  __ j(equal, &arguments_adaptor, Label::kNear);
1872  {
1873  __ movp(r8, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
1874  __ movp(r8, FieldOperand(r8, JSFunction::kSharedFunctionInfoOffset));
1875  __ movzxwq(
1876  r8, FieldOperand(r8, SharedFunctionInfo::kFormalParameterCountOffset));
1877  __ movp(rbx, rbp);
1878  }
1879  __ jmp(&arguments_done, Label::kNear);
1880  __ bind(&arguments_adaptor);
1881  {
1882  __ SmiUntag(r8,
1883  Operand(rbx, ArgumentsAdaptorFrameConstants::kLengthOffset));
1884  }
1885  __ bind(&arguments_done);
1886 
1887  Label stack_done, stack_overflow;
1888  __ subl(r8, rcx);
1889  __ j(less_equal, &stack_done);
1890  {
1891  // Check for stack overflow.
1892  Generate_StackOverflowCheck(masm, r8, rcx, &stack_overflow, Label::kNear);
1893 
1894  // Forward the arguments from the caller frame.
1895  {
1896  Label loop;
1897  __ addl(rax, r8);
1898  __ PopReturnAddressTo(rcx);
1899  __ bind(&loop);
1900  {
1901  StackArgumentsAccessor args(rbx, r8, ARGUMENTS_DONT_CONTAIN_RECEIVER);
1902  __ Push(args.GetArgumentOperand(0));
1903  __ decl(r8);
1904  __ j(not_zero, &loop);
1905  }
1906  __ PushReturnAddressFrom(rcx);
1907  }
1908  }
1909  __ jmp(&stack_done, Label::kNear);
1910  __ bind(&stack_overflow);
1911  __ TailCallRuntime(Runtime::kThrowStackOverflow);
1912  __ bind(&stack_done);
1913 
1914  // Tail-call to the {code} handler.
1915  __ Jump(code, RelocInfo::CODE_TARGET);
1916 }
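// Illustration: this builtin forwards the caller's own stack arguments, e.g.
// when a rest parameter is spread onwards as in g(...args). For a caller
// invoked as f(a, b, c) with start index rcx == 1, r8 becomes 2 and the loop
// re-pushes the last two caller arguments (reading them from the arguments
// adaptor frame instead, when one exists).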
1917 
1918 // static
1919 void Builtins::Generate_CallFunction(MacroAssembler* masm,
1920  ConvertReceiverMode mode) {
1921  // ----------- S t a t e -------------
1922  // -- rax : the number of arguments (not including the receiver)
1923  // -- rdi : the function to call (checked to be a JSFunction)
1924  // -----------------------------------
1925  StackArgumentsAccessor args(rsp, rax);
1926  __ AssertFunction(rdi);
1927 
1928  // ES6 section 9.2.1 [[Call]] ( thisArgument, argumentsList)
1929  // Check that the function is not a "classConstructor".
1930  Label class_constructor;
1931  __ movp(rdx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
1932  __ testl(FieldOperand(rdx, SharedFunctionInfo::kFlagsOffset),
1933  Immediate(SharedFunctionInfo::IsClassConstructorBit::kMask));
1934  __ j(not_zero, &class_constructor);
1935 
1936  // ----------- S t a t e -------------
1937  // -- rax : the number of arguments (not including the receiver)
1938  // -- rdx : the shared function info.
1939  // -- rdi : the function to call (checked to be a JSFunction)
1940  // -----------------------------------
1941 
1942  // Enter the context of the function; ToObject has to run in the function
1943  // context, and we also need to take the global proxy from the function
1944  // context in case of conversion.
1945  __ movp(rsi, FieldOperand(rdi, JSFunction::kContextOffset));
1946  // We need to convert the receiver for non-native sloppy mode functions.
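  // JS-level sketch of the conversion below (standard sloppy-mode receiver
  // semantics): given a sloppy-mode function f() { return this; },
  //   f.call(42);         // the receiver becomes a Number wrapper object
  //   f.call(undefined);  // the receiver becomes the global proxy
  // Strict-mode and native functions skip this and see the value unchanged.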
1947  Label done_convert;
1948  __ testl(FieldOperand(rdx, SharedFunctionInfo::kFlagsOffset),
1949  Immediate(SharedFunctionInfo::IsNativeBit::kMask |
1950  SharedFunctionInfo::IsStrictBit::kMask));
1951  __ j(not_zero, &done_convert);
1952  {
1953  // ----------- S t a t e -------------
1954  // -- rax : the number of arguments (not including the receiver)
1955  // -- rdx : the shared function info.
1956  // -- rdi : the function to call (checked to be a JSFunction)
1957  // -- rsi : the function context.
1958  // -----------------------------------
1959 
1960  if (mode == ConvertReceiverMode::kNullOrUndefined) {
1961  // Patch receiver to global proxy.
1962  __ LoadGlobalProxy(rcx);
1963  } else {
1964  Label convert_to_object, convert_receiver;
1965  __ movp(rcx, args.GetReceiverOperand());
1966  __ JumpIfSmi(rcx, &convert_to_object, Label::kNear);
1967  STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
1968  __ CmpObjectType(rcx, FIRST_JS_RECEIVER_TYPE, rbx);
1969  __ j(above_equal, &done_convert);
1970  if (mode != ConvertReceiverMode::kNotNullOrUndefined) {
1971  Label convert_global_proxy;
1972  __ JumpIfRoot(rcx, RootIndex::kUndefinedValue, &convert_global_proxy,
1973  Label::kNear);
1974  __ JumpIfNotRoot(rcx, RootIndex::kNullValue, &convert_to_object,
1975  Label::kNear);
1976  __ bind(&convert_global_proxy);
1977  {
1978  // Patch receiver to global proxy.
1979  __ LoadGlobalProxy(rcx);
1980  }
1981  __ jmp(&convert_receiver);
1982  }
1983  __ bind(&convert_to_object);
1984  {
1985  // Convert receiver using ToObject.
1986  // TODO(bmeurer): Inline the allocation here to avoid building the frame
1987  // in the fast case? (fall back to AllocateInNewSpace?)
1988  FrameScope scope(masm, StackFrame::INTERNAL);
1989  __ SmiTag(rax, rax);
1990  __ Push(rax);
1991  __ Push(rdi);
1992  __ movp(rax, rcx);
1993  __ Push(rsi);
1994  __ Call(BUILTIN_CODE(masm->isolate(), ToObject),
1995  RelocInfo::CODE_TARGET);
1996  __ Pop(rsi);
1997  __ movp(rcx, rax);
1998  __ Pop(rdi);
1999  __ Pop(rax);
2000  __ SmiUntag(rax, rax);
2001  }
2002  __ movp(rdx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
2003  __ bind(&convert_receiver);
2004  }
2005  __ movp(args.GetReceiverOperand(), rcx);
2006  }
2007  __ bind(&done_convert);
2008 
2009  // ----------- S t a t e -------------
2010  // -- rax : the number of arguments (not including the receiver)
2011  // -- rdx : the shared function info.
2012  // -- rdi : the function to call (checked to be a JSFunction)
2013  // -- rsi : the function context.
2014  // -----------------------------------
2015 
2016  __ movzxwq(
2017  rbx, FieldOperand(rdx, SharedFunctionInfo::kFormalParameterCountOffset));
2018  ParameterCount actual(rax);
2019  ParameterCount expected(rbx);
2020 
2021  __ InvokeFunctionCode(rdi, no_reg, expected, actual, JUMP_FUNCTION);
2022 
2023  // The function is a "classConstructor", need to raise an exception.
2024  __ bind(&class_constructor);
2025  {
2026  FrameScope frame(masm, StackFrame::INTERNAL);
2027  __ Push(rdi);
2028  __ CallRuntime(Runtime::kThrowConstructorNonCallableError);
2029  }
2030 }
2031 
2032 namespace {
2033 
2034 void Generate_PushBoundArguments(MacroAssembler* masm) {
2035  // ----------- S t a t e -------------
2036  // -- rax : the number of arguments (not including the receiver)
2037  // -- rdx : new.target (only in case of [[Construct]])
2038  // -- rdi : target (checked to be a JSBoundFunction)
2039  // -----------------------------------
2040 
2041  // Load [[BoundArguments]] into rcx and its length into rbx.
2042  Label no_bound_arguments;
2043  __ movp(rcx, FieldOperand(rdi, JSBoundFunction::kBoundArgumentsOffset));
2044  __ SmiUntag(rbx, FieldOperand(rcx, FixedArray::kLengthOffset));
2045  __ testl(rbx, rbx);
2046  __ j(zero, &no_bound_arguments);
2047  {
2048  // ----------- S t a t e -------------
2049  // -- rax : the number of arguments (not including the receiver)
2050  // -- rdx : new.target (only in case of [[Construct]])
2051  // -- rdi : target (checked to be a JSBoundFunction)
2052  // -- rcx : the [[BoundArguments]] (implemented as FixedArray)
2053  // -- rbx : the number of [[BoundArguments]] (checked to be non-zero)
2054  // -----------------------------------
2055 
2056  // Reserve stack space for the [[BoundArguments]].
2057  {
2058  Label done;
2059  __ leap(kScratchRegister, Operand(rbx, times_pointer_size, 0));
2060  __ subp(rsp, kScratchRegister);
2061  // Check the stack for overflow. We are not trying to catch interruptions
2062  // (i.e. debug break and preemption) here, so check the "real stack
2063  // limit".
2064  __ CompareRoot(rsp, RootIndex::kRealStackLimit);
2065  __ j(above_equal, &done, Label::kNear);
2066  // Restore the stack pointer.
2067  __ leap(rsp, Operand(rsp, rbx, times_pointer_size, 0));
2068  {
2069  FrameScope scope(masm, StackFrame::MANUAL);
2070  __ EnterFrame(StackFrame::INTERNAL);
2071  __ CallRuntime(Runtime::kThrowStackOverflow);
2072  }
2073  __ bind(&done);
2074  }
2075 
2076  // Adjust effective number of arguments to include return address.
2077  __ incl(rax);
2078 
2079  // Relocate arguments and return address down the stack.
2080  {
2081  Label loop;
2082  __ Set(rcx, 0);
2083  __ leap(rbx, Operand(rsp, rbx, times_pointer_size, 0));
2084  __ bind(&loop);
2085  __ movp(kScratchRegister, Operand(rbx, rcx, times_pointer_size, 0));
2086  __ movp(Operand(rsp, rcx, times_pointer_size, 0), kScratchRegister);
2087  __ incl(rcx);
2088  __ cmpl(rcx, rax);
2089  __ j(less, &loop);
2090  }
2091 
2092  // Copy [[BoundArguments]] to the stack (below the arguments).
2093  {
2094  Label loop;
2095  __ movp(rcx, FieldOperand(rdi, JSBoundFunction::kBoundArgumentsOffset));
2096  __ SmiUntag(rbx, FieldOperand(rcx, FixedArray::kLengthOffset));
2097  __ bind(&loop);
2098  __ decl(rbx);
2099  __ movp(kScratchRegister, FieldOperand(rcx, rbx, times_pointer_size,
2100  FixedArray::kHeaderSize));
2101  __ movp(Operand(rsp, rax, times_pointer_size, 0), kScratchRegister);
2102  __ leal(rax, Operand(rax, 1));
2103  __ j(greater, &loop);
2104  }
2105 
2106  // Adjust the effective number of arguments: rax now holds the argument
2107  // count from the call, plus one for the return address, plus the number
2108  // of [[BoundArguments]], so subtract one for the return address slot.
2109  __ decl(rax);
2110  }
2111  __ bind(&no_bound_arguments);
2112 }
2113 
2114 } // namespace
2115 
2116 // static
2117 void Builtins::Generate_CallBoundFunctionImpl(MacroAssembler* masm) {
2118  // ----------- S t a t e -------------
2119  // -- rax : the number of arguments (not including the receiver)
2120  // -- rdi : the function to call (checked to be a JSBoundFunction)
2121  // -----------------------------------
2122  __ AssertBoundFunction(rdi);
2123 
2124  // Patch the receiver to [[BoundThis]].
2125  StackArgumentsAccessor args(rsp, rax);
2126  __ movp(rbx, FieldOperand(rdi, JSBoundFunction::kBoundThisOffset));
2127  __ movp(args.GetReceiverOperand(), rbx);
2128 
2129  // Push the [[BoundArguments]] onto the stack.
2130  Generate_PushBoundArguments(masm);
2131 
2132  // Call the [[BoundTargetFunction]] via the Call builtin.
2133  __ movp(rdi, FieldOperand(rdi, JSBoundFunction::kBoundTargetFunctionOffset));
2134  __ Jump(BUILTIN_CODE(masm->isolate(), Call_ReceiverIsAny),
2135  RelocInfo::CODE_TARGET);
2136 }
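// JS-level sketch (standard Function.prototype.bind semantics):
//   const g = f.bind(o, 1, 2);
//   g(3);  // invokes f with receiver o and arguments (1, 2, 3)
// The receiver patch above installs o ([[BoundThis]]), and
// Generate_PushBoundArguments splices 1 and 2 in front of 3 on the stack.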
2137 
2138 // static
2139 void Builtins::Generate_Call(MacroAssembler* masm, ConvertReceiverMode mode) {
2140  // ----------- S t a t e -------------
2141  // -- rax : the number of arguments (not including the receiver)
2142  // -- rdi : the target to call (can be any Object)
2143  // -----------------------------------
2144  StackArgumentsAccessor args(rsp, rax);
2145 
2146  Label non_callable;
2147  __ JumpIfSmi(rdi, &non_callable);
2148  __ CmpObjectType(rdi, JS_FUNCTION_TYPE, rcx);
2149  __ Jump(masm->isolate()->builtins()->CallFunction(mode),
2150  RelocInfo::CODE_TARGET, equal);
2151 
2152  __ CmpInstanceType(rcx, JS_BOUND_FUNCTION_TYPE);
2153  __ Jump(BUILTIN_CODE(masm->isolate(), CallBoundFunction),
2154  RelocInfo::CODE_TARGET, equal);
2155 
2156  // Check if target has a [[Call]] internal method.
2157  __ testb(FieldOperand(rcx, Map::kBitFieldOffset),
2158  Immediate(Map::IsCallableBit::kMask));
2159  __ j(zero, &non_callable, Label::kNear);
2160 
2161  // Check if the target is a proxy and call the CallProxy builtin if so.
2162  __ CmpInstanceType(rcx, JS_PROXY_TYPE);
2163  __ Jump(BUILTIN_CODE(masm->isolate(), CallProxy), RelocInfo::CODE_TARGET,
2164  equal);
2165 
2166  // Call to something else, which might have a [[Call]] internal method
2167  // (if not, we raise an exception).
2168 
2169  // Overwrite the original receiver with the (original) target.
2170  __ movp(args.GetReceiverOperand(), rdi);
2171  // Let the "call_as_function_delegate" take care of the rest.
2172  __ LoadNativeContextSlot(Context::CALL_AS_FUNCTION_DELEGATE_INDEX, rdi);
2173  __ Jump(masm->isolate()->builtins()->CallFunction(
2174  ConvertReceiverMode::kNotNullOrUndefined),
2175  RelocInfo::CODE_TARGET);
2176 
2177  // Call to something that is not callable.
2178  __ bind(&non_callable);
2179  {
2180  FrameScope scope(masm, StackFrame::INTERNAL);
2181  __ Push(rdi);
2182  __ CallRuntime(Runtime::kThrowCalledNonCallable);
2183  }
2184 }
2185 
2186 // static
2187 void Builtins::Generate_ConstructFunction(MacroAssembler* masm) {
2188  // ----------- S t a t e -------------
2189  // -- rax : the number of arguments (not including the receiver)
2190  // -- rdx : the new target (checked to be a constructor)
2191  // -- rdi : the constructor to call (checked to be a JSFunction)
2192  // -----------------------------------
2193  __ AssertConstructor(rdi);
2194  __ AssertFunction(rdi);
2195 
2196  // The calling convention for function-specific ConstructStubs requires
2197  // rbx to contain either an AllocationSite or undefined.
2198  __ LoadRoot(rbx, RootIndex::kUndefinedValue);
2199 
2200  // Jump to JSBuiltinsConstructStub or JSConstructStubGeneric.
2201  __ movp(rcx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
2202  __ testl(FieldOperand(rcx, SharedFunctionInfo::kFlagsOffset),
2203  Immediate(SharedFunctionInfo::ConstructAsBuiltinBit::kMask));
2204  __ Jump(BUILTIN_CODE(masm->isolate(), JSBuiltinsConstructStub),
2205  RelocInfo::CODE_TARGET, not_zero);
2206 
2207  __ Jump(BUILTIN_CODE(masm->isolate(), JSConstructStubGeneric),
2208  RelocInfo::CODE_TARGET);
2209 }
2210 
2211 // static
2212 void Builtins::Generate_ConstructBoundFunction(MacroAssembler* masm) {
2213  // ----------- S t a t e -------------
2214  // -- rax : the number of arguments (not including the receiver)
2215  // -- rdx : the new target (checked to be a constructor)
2216  // -- rdi : the constructor to call (checked to be a JSBoundFunction)
2217  // -----------------------------------
2218  __ AssertConstructor(rdi);
2219  __ AssertBoundFunction(rdi);
2220 
2221  // Push the [[BoundArguments]] onto the stack.
2222  Generate_PushBoundArguments(masm);
2223 
2224  // Patch new.target to [[BoundTargetFunction]] if new.target equals target.
2225  {
2226  Label done;
2227  __ cmpp(rdi, rdx);
2228  __ j(not_equal, &done, Label::kNear);
2229  __ movp(rdx,
2230  FieldOperand(rdi, JSBoundFunction::kBoundTargetFunctionOffset));
2231  __ bind(&done);
2232  }
2233 
2234  // Construct the [[BoundTargetFunction]] via the Construct builtin.
2235  __ movp(rdi, FieldOperand(rdi, JSBoundFunction::kBoundTargetFunctionOffset));
2236  __ Jump(BUILTIN_CODE(masm->isolate(), Construct), RelocInfo::CODE_TARGET);
2237 }
2238 
2239 // static
2240 void Builtins::Generate_Construct(MacroAssembler* masm) {
2241  // ----------- S t a t e -------------
2242  // -- rax : the number of arguments (not including the receiver)
2243  // -- rdx : the new target (either the same as the constructor or
2244  // the JSFunction on which new was invoked initially)
2245  // -- rdi : the constructor to call (can be any Object)
2246  // -----------------------------------
2247  StackArgumentsAccessor args(rsp, rax);
2248 
2249  // Check if target is a Smi.
2250  Label non_constructor;
2251  __ JumpIfSmi(rdi, &non_constructor);
2252 
2253  // Check if target has a [[Construct]] internal method.
2254  __ movq(rcx, FieldOperand(rdi, HeapObject::kMapOffset));
2255  __ testb(FieldOperand(rcx, Map::kBitFieldOffset),
2256  Immediate(Map::IsConstructorBit::kMask));
2257  __ j(zero, &non_constructor);
2258 
2259  // Dispatch based on instance type.
2260  __ CmpInstanceType(rcx, JS_FUNCTION_TYPE);
2261  __ Jump(BUILTIN_CODE(masm->isolate(), ConstructFunction),
2262  RelocInfo::CODE_TARGET, equal);
2263 
2264  // Only dispatch to bound functions after checking whether they are
2265  // constructors.
2266  __ CmpInstanceType(rcx, JS_BOUND_FUNCTION_TYPE);
2267  __ Jump(BUILTIN_CODE(masm->isolate(), ConstructBoundFunction),
2268  RelocInfo::CODE_TARGET, equal);
2269 
2270  // Only dispatch to proxies after checking whether they are constructors.
2271  __ CmpInstanceType(rcx, JS_PROXY_TYPE);
2272  __ Jump(BUILTIN_CODE(masm->isolate(), ConstructProxy), RelocInfo::CODE_TARGET,
2273  equal);
2274 
2275  // Called Construct on an exotic Object with a [[Construct]] internal method.
2276  {
2277  // Overwrite the original receiver with the (original) target.
2278  __ movp(args.GetReceiverOperand(), rdi);
2279  // Let the "call_as_constructor_delegate" take care of the rest.
2280  __ LoadNativeContextSlot(Context::CALL_AS_CONSTRUCTOR_DELEGATE_INDEX, rdi);
2281  __ Jump(masm->isolate()->builtins()->CallFunction(),
2282  RelocInfo::CODE_TARGET);
2283  }
2284 
2285  // Called Construct on an Object that doesn't have a [[Construct]] internal
2286  // method.
2287  __ bind(&non_constructor);
2288  __ Jump(BUILTIN_CODE(masm->isolate(), ConstructedNonConstructable),
2289  RelocInfo::CODE_TARGET);
2290 }
2291 
2292 void Builtins::Generate_InterpreterOnStackReplacement(MacroAssembler* masm) {
2293  // Look up the function in the JavaScript frame.
2294  __ movp(rax, Operand(rbp, StandardFrameConstants::kCallerFPOffset));
2295  __ movp(rax, Operand(rax, JavaScriptFrameConstants::kFunctionOffset));
2296 
2297  {
2298  FrameScope scope(masm, StackFrame::INTERNAL);
2299  // Pass function as argument.
2300  __ Push(rax);
2301  __ CallRuntime(Runtime::kCompileForOnStackReplacement);
2302  }
2303 
2304  Label skip;
2305  // If the code object is null, just return to the caller.
2306  __ testp(rax, rax);
2307  __ j(not_equal, &skip, Label::kNear);
2308  __ ret(0);
2309 
2310  __ bind(&skip);
2311 
2312  // Drop the handler frame that is sitting on top of the actual
2313  // JavaScript frame. This is the case when OSR is triggered from bytecode.
2314  __ leave();
2315 
2316  // Load deoptimization data from the code object.
2317  __ movp(rbx, Operand(rax, Code::kDeoptimizationDataOffset - kHeapObjectTag));
2318 
2319  // Load the OSR entrypoint offset from the deoptimization data.
2320  __ SmiUntag(rbx, Operand(rbx, FixedArray::OffsetOfElementAt(
2321  DeoptimizationData::kOsrPcOffsetIndex) -
2322  kHeapObjectTag));
2323 
2324  // Compute the target address = code_obj + header_size + osr_offset
2325  __ leap(rax, Operand(rax, rbx, times_1, Code::kHeaderSize - kHeapObjectTag));
2326 
2327  // Overwrite the return address on the stack.
2328  __ movq(StackOperandForReturnAddress(0), rax);
2329 
2330  // And "return" to the OSR entry point of the function.
2331  __ ret(0);
2332 }
2333 
2334 void Builtins::Generate_WasmCompileLazy(MacroAssembler* masm) {
2335  // The function index was pushed to the stack by the caller as int32.
2336  __ Pop(r11);
2337  // Convert to Smi for the runtime call.
2338  __ SmiTag(r11, r11);
2339  {
2340  HardAbortScope hard_abort(masm); // Avoid calls to Abort.
2341  FrameScope scope(masm, StackFrame::WASM_COMPILE_LAZY);
2342 
2343  // Save all parameter registers (see wasm-linkage.cc). They might be
2344  // overwritten in the runtime call below. We don't have any callee-saved
2345  // registers in wasm, so no need to store anything else.
2346  static_assert(WasmCompileLazyFrameConstants::kNumberOfSavedGpParamRegs ==
2347  arraysize(wasm::kGpParamRegisters),
2348  "frame size mismatch");
2349  for (Register reg : wasm::kGpParamRegisters) {
2350  __ Push(reg);
2351  }
2352  static_assert(WasmCompileLazyFrameConstants::kNumberOfSavedFpParamRegs ==
2353  arraysize(wasm::kFpParamRegisters),
2354  "frame size mismatch");
2355  __ subp(rsp, Immediate(kSimd128Size * arraysize(wasm::kFpParamRegisters)));
2356  int offset = 0;
2357  for (DoubleRegister reg : wasm::kFpParamRegisters) {
2358  __ movdqu(Operand(rsp, offset), reg);
2359  offset += kSimd128Size;
2360  }
2361 
2362  // Push the WASM instance as an explicit argument to WasmCompileLazy.
2363  __ Push(kWasmInstanceRegister);
2364  // Push the function index as second argument.
2365  __ Push(r11);
2366  // Load the correct CEntry builtin from the instance object.
2367  __ movp(rcx, FieldOperand(kWasmInstanceRegister,
2368  WasmInstanceObject::kCEntryStubOffset));
2369  // Initialize the JavaScript context with 0. CEntry will use it to
2370  // set the current context on the isolate.
2371  __ Move(kContextRegister, Smi::zero());
2372  __ CallRuntimeWithCEntry(Runtime::kWasmCompileLazy, rcx);
2373  // The entrypoint address is the return value.
2374  __ movq(r11, kReturnRegister0);
2375 
2376  // Restore registers.
2377  for (DoubleRegister reg : base::Reversed(wasm::kFpParamRegisters)) {
2378  offset -= kSimd128Size;
2379  __ movdqu(reg, Operand(rsp, offset));
2380  }
2381  DCHECK_EQ(0, offset);
2382  __ addp(rsp, Immediate(kSimd128Size * arraysize(wasm::kFpParamRegisters)));
2383  for (Register reg : base::Reversed(wasm::kGpParamRegisters)) {
2384  __ Pop(reg);
2385  }
2386  }
2387  // Finally, jump to the entrypoint.
2388  __ jmp(r11);
2389 }
2390 
2391 void Builtins::Generate_CEntry(MacroAssembler* masm, int result_size,
2392  SaveFPRegsMode save_doubles, ArgvMode argv_mode,
2393  bool builtin_exit_frame) {
2394  // rax: number of arguments including receiver
2395  // rbx: pointer to C function (C callee-saved)
2396  // rbp: frame pointer of calling JS frame (restored after C call)
2397  // rsp: stack pointer (restored after C call)
2398  // rsi: current context (restored)
2399  //
2400  // If argv_mode == kArgvInRegister:
2401  // r15: pointer to the first argument
2402 
2403 #ifdef _WIN64
2404  // Windows 64-bit ABI passes arguments in rcx, rdx, r8, r9. It requires the
2405  // stack to be aligned to 16 bytes. It only allows a single-word to be
2406  // returned in register rax. Larger return sizes must be written to an address
2407  // passed as a hidden first argument.
2408  const Register kCCallArg0 = rcx;
2409  const Register kCCallArg1 = rdx;
2410  const Register kCCallArg2 = r8;
2411  const Register kCCallArg3 = r9;
2412  const int kArgExtraStackSpace = 2;
2413  const int kMaxRegisterResultSize = 1;
2414 #else
2415  // GCC / Clang passes arguments in rdi, rsi, rdx, rcx, r8, r9. Simple results
2416  // are returned in rax, and a struct of two pointers is returned in rax+rdx.
2417  // Larger return sizes must be written to an address passed as a hidden first
2418  // argument.
2419  const Register kCCallArg0 = rdi;
2420  const Register kCCallArg1 = rsi;
2421  const Register kCCallArg2 = rdx;
2422  const Register kCCallArg3 = rcx;
2423  const int kArgExtraStackSpace = 0;
2424  const int kMaxRegisterResultSize = 2;
2425 #endif // _WIN64
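  // Worked example of the space computation below, derived from the constants
  // above: returning a two-word result on Win64 has result_size == 2 >
  // kMaxRegisterResultSize == 1, so arg_stack_space reserves 2 + 2 = 4 slots
  // (two Arguments object slots plus two result words); on System V the
  // rax:rdx pair suffices and no extra stack space is reserved.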
2426 
2427  // Enter the exit frame that transitions from JavaScript to C++.
2428  int arg_stack_space =
2429  kArgExtraStackSpace +
2430  (result_size <= kMaxRegisterResultSize ? 0 : result_size);
2431  if (argv_mode == kArgvInRegister) {
2432  DCHECK(save_doubles == kDontSaveFPRegs);
2433  DCHECK(!builtin_exit_frame);
2434  __ EnterApiExitFrame(arg_stack_space);
2435  // Move argc into r14 (argv is already in r15).
2436  __ movp(r14, rax);
2437  } else {
2438  __ EnterExitFrame(
2439  arg_stack_space, save_doubles == kSaveFPRegs,
2440  builtin_exit_frame ? StackFrame::BUILTIN_EXIT : StackFrame::EXIT);
2441  }
2442 
2443  // rbx: pointer to builtin function (C callee-saved).
2444  // rbp: frame pointer of exit frame (restored after C call).
2445  // rsp: stack pointer (restored after C call).
2446  // r14: number of arguments including receiver (C callee-saved).
2447  // r15: argv pointer (C callee-saved).
2448 
2449  // Check stack alignment.
2450  if (FLAG_debug_code) {
2451  __ CheckStackAlignment();
2452  }
2453 
2454  // Call C function. The arguments object will be created by stubs declared by
2455  // DECLARE_RUNTIME_FUNCTION().
2456  if (result_size <= kMaxRegisterResultSize) {
2457  // Pass a pointer to the Arguments object as the first argument.
2458  // Return result in single register (rax), or a register pair (rax, rdx).
2459  __ movp(kCCallArg0, r14); // argc.
2460  __ movp(kCCallArg1, r15); // argv.
2461  __ Move(kCCallArg2, ExternalReference::isolate_address(masm->isolate()));
2462  } else {
2463  DCHECK_LE(result_size, 2);
2464  // Pass a pointer to the result location as the first argument.
2465  __ leap(kCCallArg0, StackSpaceOperand(kArgExtraStackSpace));
2466  // Pass a pointer to the Arguments object as the second argument.
2467  __ movp(kCCallArg1, r14); // argc.
2468  __ movp(kCCallArg2, r15); // argv.
2469  __ Move(kCCallArg3, ExternalReference::isolate_address(masm->isolate()));
2470  }
2471  __ call(rbx);
2472 
2473  if (result_size > kMaxRegisterResultSize) {
2474  // Read result values stored on stack. Result is stored
2475  // above the two Arguments object slots on Win64.
2476  DCHECK_LE(result_size, 2);
2477  __ movq(kReturnRegister0, StackSpaceOperand(kArgExtraStackSpace + 0));
2478  __ movq(kReturnRegister1, StackSpaceOperand(kArgExtraStackSpace + 1));
2479  }
2480  // Result is in rax or rdx:rax - do not destroy these registers!
2481 
2482  // Check result for exception sentinel.
2483  Label exception_returned;
2484  __ CompareRoot(rax, RootIndex::kException);
2485  __ j(equal, &exception_returned);
2486 
2487  // Check that there is no pending exception, otherwise we
2488  // should have returned the exception sentinel.
2489  if (FLAG_debug_code) {
2490  Label okay;
2491  __ LoadRoot(r14, RootIndex::kTheHoleValue);
2492  ExternalReference pending_exception_address = ExternalReference::Create(
2493  IsolateAddressId::kPendingExceptionAddress, masm->isolate());
2494  Operand pending_exception_operand =
2495  masm->ExternalReferenceAsOperand(pending_exception_address);
2496  __ cmpp(r14, pending_exception_operand);
2497  __ j(equal, &okay, Label::kNear);
2498  __ int3();
2499  __ bind(&okay);
2500  }
2501 
2502  // Exit the JavaScript to C++ exit frame.
2503  __ LeaveExitFrame(save_doubles == kSaveFPRegs, argv_mode == kArgvOnStack);
2504  __ ret(0);
2505 
2506  // Handling of exception.
2507  __ bind(&exception_returned);
2508 
2509  ExternalReference pending_handler_context_address = ExternalReference::Create(
2510  IsolateAddressId::kPendingHandlerContextAddress, masm->isolate());
2511  ExternalReference pending_handler_entrypoint_address =
2512  ExternalReference::Create(
2513  IsolateAddressId::kPendingHandlerEntrypointAddress, masm->isolate());
2514  ExternalReference pending_handler_fp_address = ExternalReference::Create(
2515  IsolateAddressId::kPendingHandlerFPAddress, masm->isolate());
2516  ExternalReference pending_handler_sp_address = ExternalReference::Create(
2517  IsolateAddressId::kPendingHandlerSPAddress, masm->isolate());
2518 
2519  // Ask the runtime for help to determine the handler. This will set rax to
2520  // contain the current pending exception; don't clobber it.
2521  ExternalReference find_handler =
2522  ExternalReference::Create(Runtime::kUnwindAndFindExceptionHandler);
2523  {
2524  FrameScope scope(masm, StackFrame::MANUAL);
2525  __ movp(arg_reg_1, Immediate(0)); // argc.
2526  __ movp(arg_reg_2, Immediate(0)); // argv.
2527  __ Move(arg_reg_3, ExternalReference::isolate_address(masm->isolate()));
2528  __ PrepareCallCFunction(3);
2529  __ CallCFunction(find_handler, 3);
2530  }
2531  // Retrieve the handler context, SP and FP.
2532  __ movp(rsi,
2533  masm->ExternalReferenceAsOperand(pending_handler_context_address));
2534  __ movp(rsp, masm->ExternalReferenceAsOperand(pending_handler_sp_address));
2535  __ movp(rbp, masm->ExternalReferenceAsOperand(pending_handler_fp_address));
2536 
2537  // If the handler is a JS frame, restore the context to the frame. Note that
2538  // the context will be zero (rsi == 0) for non-JS frames.
2539  Label skip;
2540  __ testp(rsi, rsi);
2541  __ j(zero, &skip, Label::kNear);
2542  __ movp(Operand(rbp, StandardFrameConstants::kContextOffset), rsi);
2543  __ bind(&skip);
2544 
2545  // Reset the masking register. This is done independent of the underlying
2546  // feature flag {FLAG_untrusted_code_mitigations} to make the snapshot work
2547  // with both configurations. It is safe to always do this, because the
2548  // underlying register is caller-saved and can be arbitrarily clobbered.
2549  __ ResetSpeculationPoisonRegister();
2550 
2551  // Compute the handler entry address and jump to it.
2552  __ movp(rdi,
2553  masm->ExternalReferenceAsOperand(pending_handler_entrypoint_address));
2554  __ jmp(rdi);
2555 }
2556 
2557 void Builtins::Generate_DoubleToI(MacroAssembler* masm) {
2558  Label check_negative, process_64_bits, done;
2559 
2560  // Account for return address and saved regs.
2561  const int kArgumentOffset = 4 * kRegisterSize;
2562 
2563  MemOperand mantissa_operand(MemOperand(rsp, kArgumentOffset));
2564  MemOperand exponent_operand(
2565  MemOperand(rsp, kArgumentOffset + kDoubleSize / 2));
2566 
2567  // The result is returned on the stack.
2568  MemOperand return_operand = mantissa_operand;
2569 
2570  Register scratch1 = rbx;
2571 
2572  // Since we must use rcx for shifts below, use some other register (rax)
2573  // to calculate the result if rcx is the requested return register.
2574  Register result_reg = rax;
2575  // Save rcx if it isn't the return register and therefore volatile, or if it
2576  // is the return register, then save the temp register we use in its stead
2577  // for the result.
2578  Register save_reg = rax;
2579  __ pushq(rcx);
2580  __ pushq(scratch1);
2581  __ pushq(save_reg);
2582 
2583  __ movl(scratch1, mantissa_operand);
2584  __ Movsd(kScratchDoubleReg, mantissa_operand);
2585  __ movl(rcx, exponent_operand);
2586 
2587  __ andl(rcx, Immediate(HeapNumber::kExponentMask));
2588  __ shrl(rcx, Immediate(HeapNumber::kExponentShift));
2589  __ leal(result_reg, MemOperand(rcx, -HeapNumber::kExponentBias));
2590  __ cmpl(result_reg, Immediate(HeapNumber::kMantissaBits));
2591  __ j(below, &process_64_bits, Label::kNear);
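  // At this point result_reg holds the unbiased exponent E of the input,
  // per the IEEE-754 double layout (a standard-format fact, not V8-specific):
  //   value = (-1)^sign * 1.mantissa * 2^E, with E = biased_exponent - 1023.
  // For E < 52 the truncation is done directly with cvttsd2siq below; for
  // E >= 52 the low 32 result bits are the low mantissa word shifted left by
  // E - 52, becoming zero once that shift exceeds 31.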
2592 
2593  // The result is entirely in the lower 32 bits of the mantissa.
2594  int delta = HeapNumber::kExponentBias + Double::kPhysicalSignificandSize;
2595  __ subl(rcx, Immediate(delta));
2596  __ xorl(result_reg, result_reg);
2597  __ cmpl(rcx, Immediate(31));
2598  __ j(above, &done, Label::kNear);
2599  __ shll_cl(scratch1);
2600  __ jmp(&check_negative, Label::kNear);
2601 
2602  __ bind(&process_64_bits);
2603  __ Cvttsd2siq(result_reg, kScratchDoubleReg);
2604  __ jmp(&done, Label::kNear);
2605 
2606  // If the double was negative, negate the integer result.
2607  __ bind(&check_negative);
2608  __ movl(result_reg, scratch1);
2609  __ negl(result_reg);
2610  __ cmpl(exponent_operand, Immediate(0));
2611  __ cmovl(greater, result_reg, scratch1);
2612 
2613  // Restore registers
2614  __ bind(&done);
2615  __ movl(return_operand, result_reg);
2616  __ popq(save_reg);
2617  __ popq(scratch1);
2618  __ popq(rcx);
2619  __ ret(0);
2620 }
2621 
2622 void Builtins::Generate_MathPowInternal(MacroAssembler* masm) {
2623  const Register exponent = rdx;
2624  const Register scratch = rcx;
2625  const XMMRegister double_result = xmm3;
2626  const XMMRegister double_base = xmm2;
2627  const XMMRegister double_exponent = xmm1;
2628  const XMMRegister double_scratch = xmm4;
2629 
2630  Label call_runtime, done, exponent_not_smi, int_exponent;
2631 
2632  // Save 1 in double_result; we need this several times later on.
2633  __ movp(scratch, Immediate(1));
2634  __ Cvtlsi2sd(double_result, scratch);
2635 
2636  Label fast_power, try_arithmetic_simplification;
2637  // Detect integer exponents stored as double.
2638  __ DoubleToI(exponent, double_exponent, double_scratch,
2639  &try_arithmetic_simplification, &try_arithmetic_simplification);
2640  __ jmp(&int_exponent);
2641 
2642  __ bind(&try_arithmetic_simplification);
2643  __ Cvttsd2si(exponent, double_exponent);
2644  // Skip to runtime if possibly NaN (indicated by the indefinite integer).
2645  __ cmpl(exponent, Immediate(0x1));
2646  __ j(overflow, &call_runtime);
2647 
2648  // Using FPU instructions to calculate power.
2649  Label fast_power_failed;
2650  __ bind(&fast_power);
2651  __ fnclex(); // Clear flags to catch exceptions later.
2652  // Transfer (B)ase and (E)xponent onto the FPU register stack.
2653  __ subp(rsp, Immediate(kDoubleSize));
2654  __ Movsd(Operand(rsp, 0), double_exponent);
2655  __ fld_d(Operand(rsp, 0)); // E
2656  __ Movsd(Operand(rsp, 0), double_base);
2657  __ fld_d(Operand(rsp, 0)); // B, E
2658 
2659  // Exponent is in st(1) and base is in st(0)
2660  // B ^ E = (2^(E * log2(B)) - 1) + 1 = (2^X - 1) + 1 for X = E * log2(B)
2661  // FYL2X calculates st(1) * log2(st(0))
2662  __ fyl2x(); // X
2663  __ fld(0); // X, X
2664  __ frndint(); // rnd(X), X
2665  __ fsub(1); // rnd(X), X-rnd(X)
2666  __ fxch(1); // X - rnd(X), rnd(X)
2667  // F2XM1 calculates 2^st(0) - 1 for -1 < st(0) < 1
2668  __ f2xm1(); // 2^(X-rnd(X)) - 1, rnd(X)
2669  __ fld1(); // 1, 2^(X-rnd(X)) - 1, rnd(X)
2670  __ faddp(1); // 2^(X-rnd(X)), rnd(X)
2671  // FSCALE calculates st(0) * 2^st(1)
2672  __ fscale(); // 2^X, rnd(X)
2673  __ fstp(1);
2674  // Bail out to runtime in case of exceptions in the status word.
2675  __ fnstsw_ax();
2676  __ testb(rax, Immediate(0x5F)); // Check for all but precision exception.
2677  __ j(not_zero, &fast_power_failed, Label::kNear);
2678  __ fstp_d(Operand(rsp, 0));
2679  __ Movsd(double_result, Operand(rsp, 0));
2680  __ addp(rsp, Immediate(kDoubleSize));
2681  __ jmp(&done);
2682 
2683  __ bind(&fast_power_failed);
2684  __ fninit();
2685  __ addp(rsp, Immediate(kDoubleSize));
2686  __ jmp(&call_runtime);
2687 
2688  // Calculate power with integer exponent.
2689  __ bind(&int_exponent);
2690  const XMMRegister double_scratch2 = double_exponent;
2691  // Back up exponent as we need to check if exponent is negative later.
2692  __ movp(scratch, exponent); // Back up exponent.
2693  __ Movsd(double_scratch, double_base); // Back up base.
2694  __ Movsd(double_scratch2, double_result); // Load double_exponent with 1.
2695 
2696  // Get absolute value of exponent.
2697  Label no_neg, while_true, while_false;
2698  __ testl(scratch, scratch);
2699  __ j(positive, &no_neg, Label::kNear);
2700  __ negl(scratch);
2701  __ bind(&no_neg);
2702 
2703  __ j(zero, &while_false, Label::kNear);
2704  __ shrl(scratch, Immediate(1));
2705  // Above condition means CF==0 && ZF==0. This means that the
2706  // bit that has been shifted out is 0 and the result is not 0.
2707  __ j(above, &while_true, Label::kNear);
2708  __ Movsd(double_result, double_scratch);
2709  __ j(zero, &while_false, Label::kNear);
2710 
2711  __ bind(&while_true);
2712  __ shrl(scratch, Immediate(1));
2713  __ Mulsd(double_scratch, double_scratch);
2714  __ j(above, &while_true, Label::kNear);
2715  __ Mulsd(double_result, double_scratch);
2716  __ j(not_zero, &while_true);
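  // Worked example of the square-and-multiply loop above (plain binary
  // exponentiation, nothing V8-specific): for |exponent| == 6 (binary 110),
  // scratch steps 6 -> 3 -> 1 -> 0 while double_scratch holds b^2, then b^4;
  // both factors are multiplied into double_result because bits 1 and 2 of
  // the exponent are set, yielding b^2 * b^4 = b^6.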
2717 
2718  __ bind(&while_false);
2719  // If the exponent is negative, return 1/result.
2720  __ testl(exponent, exponent);
2721  __ j(greater, &done);
2722  __ Divsd(double_scratch2, double_result);
2723  __ Movsd(double_result, double_scratch2);
2724  // Test whether result is zero. Bail out to check for subnormal result.
2725  // Due to subnormals, x^-y == (1/x)^y does not hold in all cases.
2726  __ Xorpd(double_scratch2, double_scratch2);
2727  __ Ucomisd(double_scratch2, double_result);
2728  // double_exponent aliased as double_scratch2 has already been overwritten
2729  // and may not have contained the exponent value in the first place when the
2730  // input was a smi. We reset it with the exponent value before bailing out.
2731  __ j(not_equal, &done);
2732  __ Cvtlsi2sd(double_exponent, exponent);
2733 
2734  // Returning or bailing out.
2735  __ bind(&call_runtime);
2736  // Move base to the correct argument register. Exponent is already in xmm1.
2737  __ Movsd(xmm0, double_base);
2738  DCHECK(double_exponent == xmm1);
2739  {
2740  AllowExternalCallThatCantCauseGC scope(masm);
2741  __ PrepareCallCFunction(2);
2742  __ CallCFunction(ExternalReference::power_double_double_function(), 2);
2743  }
2744  // Return value is in xmm0.
2745  __ Movsd(double_result, xmm0);
2746 
2747  __ bind(&done);
2748  __ ret(0);
2749 }
2750 
2751 namespace {
2752 
2753 void GenerateInternalArrayConstructorCase(MacroAssembler* masm,
2754  ElementsKind kind) {
2755  Label not_zero_case, not_one_case;
2756  Label normal_sequence;
2757 
2758  __ testp(rax, rax);
2759  __ j(not_zero, &not_zero_case);
2760  __ Jump(CodeFactory::InternalArrayNoArgumentConstructor(masm->isolate(), kind)
2761  .code(),
2762  RelocInfo::CODE_TARGET);
2763 
2764  __ bind(&not_zero_case);
2765  __ cmpl(rax, Immediate(1));
2766  __ j(greater, &not_one_case);
2767 
2768  if (IsFastPackedElementsKind(kind)) {
2769  // We might need to create a holey array, so look at
2770  // the first argument to decide.
2771  StackArgumentsAccessor args(rsp, 1, ARGUMENTS_DONT_CONTAIN_RECEIVER);
2772  __ movp(rcx, args.GetArgumentOperand(0));
2773  __ testp(rcx, rcx);
2774  __ j(zero, &normal_sequence);
2775 
2776  __ Jump(CodeFactory::InternalArraySingleArgumentConstructor(
2777  masm->isolate(), GetHoleyElementsKind(kind))
2778  .code(),
2779  RelocInfo::CODE_TARGET);
2780  }
2781 
2782  __ bind(&normal_sequence);
2783  __ Jump(
2784  CodeFactory::InternalArraySingleArgumentConstructor(masm->isolate(), kind)
2785  .code(),
2786  RelocInfo::CODE_TARGET);
2787 
2788  __ bind(&not_one_case);
2789  // Load undefined into the allocation site parameter as required by
2790  // ArrayNArgumentsConstructor.
2791  __ LoadRoot(kJavaScriptCallExtraArg1Register, RootIndex::kUndefinedValue);
2792  Handle<Code> code = BUILTIN_CODE(masm->isolate(), ArrayNArgumentsConstructor);
2793  __ Jump(code, RelocInfo::CODE_TARGET);
2794 }
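// Dispatch sketch for the helper above: new InternalArray() takes the
// no-argument stub; new InternalArray(n) takes a single-argument stub, where
// a packed requested kind with nonzero n is widened to the holey variant
// (the n elements start out unset); two or more arguments go to
// ArrayNArgumentsConstructor with an undefined allocation site.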
2795 
2796 } // namespace
2797 
2798 void Builtins::Generate_InternalArrayConstructorImpl(MacroAssembler* masm) {
2799  // ----------- S t a t e -------------
2800  // -- rax : argc
2801  // -- rdi : constructor
2802  // -- rsp[0] : return address
2803  // -- rsp[8] : last argument
2804  // -----------------------------------
2805 
2806  if (FLAG_debug_code) {
2807  // The array construct code is only set for the global and natives
2808  // builtin Array functions, which always have maps.
2809 
2810  // Initial map for the builtin Array function should be a map.
2811  __ movp(rcx, FieldOperand(rdi, JSFunction::kPrototypeOrInitialMapOffset));
2812  // The Smi check below catches both a nullptr and a Smi.
2813  STATIC_ASSERT(kSmiTag == 0);
2814  Condition not_smi = NegateCondition(masm->CheckSmi(rcx));
2815  __ Check(not_smi, AbortReason::kUnexpectedInitialMapForArrayFunction);
2816  __ CmpObjectType(rcx, MAP_TYPE, rcx);
2817  __ Check(equal, AbortReason::kUnexpectedInitialMapForArrayFunction);
2818  }
2819 
2820  // Figure out the right elements kind
2821  __ movp(rcx, FieldOperand(rdi, JSFunction::kPrototypeOrInitialMapOffset));
2822 
2823  // Load the map's "bit field 2" into rcx. We only need the first byte,
2824  // but the following masking takes care of that anyway.
2825  __ movzxbp(rcx, FieldOperand(rcx, Map::kBitField2Offset));
2826  // Retrieve elements_kind from bit field 2.
2827  __ DecodeField<Map::ElementsKindBits>(rcx);
2828 
2829  if (FLAG_debug_code) {
2830  Label done;
2831  __ cmpl(rcx, Immediate(PACKED_ELEMENTS));
2832  __ j(equal, &done);
2833  __ cmpl(rcx, Immediate(HOLEY_ELEMENTS));
2834  __ Assert(
2835  equal,
2836  AbortReason::kInvalidElementsKindForInternalArrayOrInternalPackedArray);
2837  __ bind(&done);
2838  }
2839 
2840  Label fast_elements_case;
2841  __ cmpl(rcx, Immediate(PACKED_ELEMENTS));
2842  __ j(equal, &fast_elements_case);
2843  GenerateInternalArrayConstructorCase(masm, HOLEY_ELEMENTS);
2844 
2845  __ bind(&fast_elements_case);
2846  GenerateInternalArrayConstructorCase(masm, PACKED_ELEMENTS);
2847 }
2848 
2849 #undef __
2850 
2851 } // namespace internal
2852 } // namespace v8
2853 
2854 #endif // V8_TARGET_ARCH_X64