V8 API Reference, 7.2.502.16 (for Deno 0.2.4)
builtins-ia32.cc
1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
4 
5 #if V8_TARGET_ARCH_IA32
6 
7 #include "src/base/adapters.h"
8 #include "src/code-factory.h"
9 #include "src/counters.h"
10 #include "src/debug/debug.h"
11 #include "src/deoptimizer.h"
12 #include "src/frame-constants.h"
13 #include "src/frames.h"
14 #include "src/objects-inl.h"
15 #include "src/objects/js-generator.h"
16 #include "src/objects/smi.h"
17 #include "src/register-configuration.h"
18 #include "src/wasm/wasm-linkage.h"
19 #include "src/wasm/wasm-objects.h"
20 
21 namespace v8 {
22 namespace internal {
23 
24 #define __ ACCESS_MASM(masm)
25 
// Builds a trampoline that tail-calls the C++ builtin at |address|.
// The external reference is materialized in the designated extra-arg
// register, then control jumps to the adaptor builtin that constructs
// the requested kind of exit frame (BUILTIN_EXIT or plain EXIT) before
// entering C++.
26 void Builtins::Generate_Adaptor(MacroAssembler* masm, Address address,
27  ExitFrameType exit_frame_type) {
28  __ Move(kJavaScriptCallExtraArg1Register,
29  Immediate(ExternalReference::Create(address)));
30  if (exit_frame_type == BUILTIN_EXIT) {
31  __ Jump(BUILTIN_CODE(masm->isolate(), AdaptorWithBuiltinExitFrame),
32  RelocInfo::CODE_TARGET);
33  } else {
  // Only two exit frame types exist; anything else is a programming error.
34  DCHECK(exit_frame_type == EXIT);
35  __ Jump(BUILTIN_CODE(masm->isolate(), AdaptorWithExitFrame),
36  RelocInfo::CODE_TARGET);
37  }
38 }
39 
// Calls the runtime function |function_id| (which is expected to return a
// Code object) and tail-calls the returned code. The JS calling-convention
// registers (eax/edx/edi) are saved across the runtime call and restored
// before the jump.
40 static void GenerateTailCallToReturnedCode(MacroAssembler* masm,
41  Runtime::FunctionId function_id) {
42  // ----------- S t a t e -------------
43  // -- eax : argument count (preserved for callee)
44  // -- edx : new target (preserved for callee)
45  // -- edi : target function (preserved for callee)
46  // -----------------------------------
47  {
48  FrameScope scope(masm, StackFrame::INTERNAL);
49  // Push the number of arguments to the callee.
50  __ SmiTag(eax);
51  __ push(eax);
52  // Push a copy of the target function and the new target.
53  __ push(edi);
54  __ push(edx);
55  // Function is also the parameter to the runtime call.
56  __ push(edi);
57 
58  __ CallRuntime(function_id, 1);
  // The runtime call returns the Code object in eax; stash it in ecx so
  // the pops below can restore the original eax.
59  __ mov(ecx, eax);
60 
61  // Restore target function and new target.
62  __ pop(edx);
63  __ pop(edi);
64  __ pop(eax);
65  __ SmiUntag(eax);
66  }
67 
68  static_assert(kJavaScriptCallCodeStartRegister == ecx, "ABI mismatch");
  // Skip the Code header so we jump to the first instruction.
69  __ lea(ecx, FieldOperand(ecx, Code::kHeaderSize));
70  __ jmp(ecx);
71 }
72 
73 namespace {
74 
// Construct stub used for builtin (non-generic) constructor calls: pushes
// a construct frame, copies the arguments, invokes the constructor, and
// on return drops the caller's arguments before returning. The implicit
// receiver is the hole, and the constructor's return value is returned
// as-is (no object/receiver selection as in the generic stub).
75 void Generate_JSBuiltinsConstructStubHelper(MacroAssembler* masm) {
76  // ----------- S t a t e -------------
77  // -- eax: number of arguments
78  // -- edi: constructor function
79  // -- edx: new target
80  // -- esi: context
81  // -----------------------------------
82 
83  // Enter a construct frame.
84  {
85  FrameScope scope(masm, StackFrame::CONSTRUCT);
86 
87  // Preserve the incoming parameters on the stack.
88  __ SmiTag(eax);
89  __ push(esi);
90  __ push(eax);
91  __ SmiUntag(eax);
92 
93  // The receiver for the builtin/api call.
94  __ PushRoot(RootIndex::kTheHoleValue);
95 
96  // Set up pointer to last argument. We are using esi as scratch register.
97  __ lea(esi, Operand(ebp, StandardFrameConstants::kCallerSPOffset));
98 
99  // Copy arguments and receiver to the expression stack.
100  Label loop, entry;
101  __ mov(ecx, eax);
102  // ----------- S t a t e -------------
103  // -- eax: number of arguments (untagged)
104  // -- edi: constructor function
105  // -- edx: new target
106  // -- esi: pointer to last argument
107  // -- ecx: counter
108  // -- sp[0*kPointerSize]: the hole (receiver)
109  // -- sp[1*kPointerSize]: number of arguments (tagged)
110  // -- sp[2*kPointerSize]: context
111  // -----------------------------------
  // Count-down copy loop: pushes arguments ecx-1 .. 0 (ecx starts at eax).
112  __ jmp(&entry);
113  __ bind(&loop);
114  __ push(Operand(esi, ecx, times_4, 0));
115  __ bind(&entry);
116  __ dec(ecx);
117  __ j(greater_equal, &loop);
118 
119  // Call the function.
120  // eax: number of arguments (untagged)
121  // edi: constructor function
122  // edx: new target
123  ParameterCount actual(eax);
124  // Reload context from the frame.
125  __ mov(esi, Operand(ebp, ConstructFrameConstants::kContextOffset));
126  __ InvokeFunction(edi, edx, actual, CALL_FUNCTION);
127 
128  // Restore context from the frame.
129  __ mov(esi, Operand(ebp, ConstructFrameConstants::kContextOffset));
130  // Restore smi-tagged arguments count from the frame.
131  __ mov(edx, Operand(ebp, ConstructFrameConstants::kLengthOffset));
132  // Leave construct frame.
133  }
134 
135  // Remove caller arguments from the stack and return.
  // edx holds the smi-tagged count; times_2 un-tags it while scaling,
  // relying on the smi-tag layout asserted below.
136  STATIC_ASSERT(kSmiTagSize == 1 && kSmiTag == 0);
137  __ PopReturnAddressTo(ecx);
138  __ lea(esp, Operand(esp, edx, times_2, 1 * kPointerSize)); // 1 ~ receiver
139  __ PushReturnAddressFrom(ecx);
140  __ ret(0);
141 }
142 
// Jumps to |stack_overflow| if pushing |num_args| pointers (plus optionally
// the receiver) would cross the real stack limit. |scratch| is clobbered.
143 void Generate_StackOverflowCheck(MacroAssembler* masm, Register num_args,
144  Register scratch, Label* stack_overflow,
145  bool include_receiver = false) {
146  // Check the stack for overflow. We are not trying to catch
147  // interruptions (e.g. debug break and preemption) here, so the "real stack
148  // limit" is checked.
149  ExternalReference real_stack_limit =
150  ExternalReference::address_of_real_stack_limit(masm->isolate());
151  // Compute the space that is left as a negative number in scratch. If
152  // we already overflowed, this will be a positive number.
153  __ mov(scratch, __ ExternalReferenceAsOperand(real_stack_limit, scratch));
154  __ sub(scratch, esp);
155  // Add the size of the arguments.
156  static_assert(kPointerSize == 4,
157  "The next instruction assumes kPointerSize == 4");
158  __ lea(scratch, Operand(scratch, num_args, times_4, 0));
159  if (include_receiver) {
160  __ add(scratch, Immediate(kPointerSize));
161  }
162  // See if we overflowed, i.e. scratch is positive.
163  __ cmp(scratch, Immediate(0));
164  __ j(greater, stack_overflow); // Signed comparison.
165 }
166 
167 } // namespace
168 
169 // The construct stub for ES5 constructor functions and ES6 class constructors.
170 void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) {
171  // ----------- S t a t e -------------
172  // -- eax: number of arguments (untagged)
173  // -- edi: constructor function
174  // -- edx: new target
175  // -- esi: context
176  // -- sp[...]: constructor arguments
177  // -----------------------------------
178 
179  // Enter a construct frame.
180  {
181  FrameScope scope(masm, StackFrame::CONSTRUCT);
182  Label post_instantiation_deopt_entry, not_create_implicit_receiver;
183 
184  // Preserve the incoming parameters on the stack.
185  __ mov(ecx, eax);
186  __ SmiTag(ecx);
187  __ Push(esi);
188  __ Push(ecx);
189  __ Push(edi);
  // The hole acts as frame padding here (see the state comment below).
190  __ PushRoot(RootIndex::kTheHoleValue);
191  __ Push(edx);
192 
193  // ----------- S t a t e -------------
194  // -- sp[0*kPointerSize]: new target
195  // -- sp[1*kPointerSize]: padding
196  // -- edi and sp[2*kPointerSize]: constructor function
197  // -- sp[3*kPointerSize]: argument count
198  // -- sp[4*kPointerSize]: context
199  // -----------------------------------
200 
  // Derived class constructors must not allocate the receiver; test the
  // SharedFunctionInfo flag to decide.
201  __ mov(eax, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
202  __ test(FieldOperand(eax, SharedFunctionInfo::kFlagsOffset),
203  Immediate(SharedFunctionInfo::IsDerivedConstructorBit::kMask));
204  __ j(not_zero, &not_create_implicit_receiver);
205 
206  // If not derived class constructor: Allocate the new receiver object.
207  __ IncrementCounter(masm->isolate()->counters()->constructed_objects(), 1,
208  eax);
209  __ Call(BUILTIN_CODE(masm->isolate(), FastNewObject),
210  RelocInfo::CODE_TARGET);
211  __ jmp(&post_instantiation_deopt_entry, Label::kNear);
212 
213  // Else: use TheHoleValue as receiver for constructor call
214  __ bind(&not_create_implicit_receiver);
215  __ LoadRoot(eax, RootIndex::kTheHoleValue);
216 
217  // ----------- S t a t e -------------
218  // -- eax: implicit receiver
219  // -- Slot 4 / sp[0*kPointerSize]: new target
220  // -- Slot 3 / sp[1*kPointerSize]: padding
221  // -- Slot 2 / sp[2*kPointerSize]: constructor function
222  // -- Slot 1 / sp[3*kPointerSize]: number of arguments (tagged)
223  // -- Slot 0 / sp[4*kPointerSize]: context
224  // -----------------------------------
225  // Deoptimizer enters here.
226  masm->isolate()->heap()->SetConstructStubCreateDeoptPCOffset(
227  masm->pc_offset());
228  __ bind(&post_instantiation_deopt_entry);
229 
230  // Restore new target.
231  __ Pop(edx);
232 
233  // Push the allocated receiver to the stack. We need two copies
234  // because we may have to return the original one and the calling
235  // conventions dictate that the called function pops the receiver.
236  __ Push(eax);
237  __ Push(eax);
238 
239  // ----------- S t a t e -------------
240  // -- edx: new target
241  // -- sp[0*kPointerSize]: implicit receiver
242  // -- sp[1*kPointerSize]: implicit receiver
243  // -- sp[2*kPointerSize]: padding
244  // -- sp[3*kPointerSize]: constructor function
245  // -- sp[4*kPointerSize]: number of arguments (tagged)
246  // -- sp[5*kPointerSize]: context
247  // -----------------------------------
248 
249  // Restore argument count.
250  __ mov(eax, Operand(ebp, ConstructFrameConstants::kLengthOffset));
251  __ SmiUntag(eax);
252 
253  // Set up pointer to last argument.
254  __ lea(edi, Operand(ebp, StandardFrameConstants::kCallerSPOffset));
255 
256  // Check if we have enough stack space to push all arguments.
257  // Argument count in eax. Clobbers ecx.
258  Label enough_stack_space, stack_overflow;
259  Generate_StackOverflowCheck(masm, eax, ecx, &stack_overflow);
260  __ jmp(&enough_stack_space);
261 
262  __ bind(&stack_overflow);
263  // Restore context from the frame.
264  __ mov(esi, Operand(ebp, ConstructFrameConstants::kContextOffset));
265  __ CallRuntime(Runtime::kThrowStackOverflow);
266  // This should be unreachable.
267  __ int3();
268 
269  __ bind(&enough_stack_space);
270 
271  // Copy arguments and receiver to the expression stack.
272  Label loop, entry;
273  __ mov(ecx, eax);
274  // ----------- S t a t e -------------
275  // -- eax: number of arguments (untagged)
276  // -- edx: new target
277  // -- edi: pointer to last argument
278  // -- ecx: counter (tagged)
279  // -- sp[0*kPointerSize]: implicit receiver
280  // -- sp[1*kPointerSize]: implicit receiver
281  // -- sp[2*kPointerSize]: padding
282  // -- sp[3*kPointerSize]: constructor function
283  // -- sp[4*kPointerSize]: number of arguments (tagged)
284  // -- sp[5*kPointerSize]: context
285  // -----------------------------------
  // Count-down copy loop: pushes arguments ecx-1 .. 0.
286  __ jmp(&entry, Label::kNear);
287  __ bind(&loop);
288  __ Push(Operand(edi, ecx, times_pointer_size, 0));
289  __ bind(&entry);
290  __ dec(ecx);
291  __ j(greater_equal, &loop);
292 
293  // Restore and call the constructor function.
294  __ mov(edi, Operand(ebp, ConstructFrameConstants::kConstructorOffset));
295  ParameterCount actual(eax);
296  __ InvokeFunction(edi, edx, actual, CALL_FUNCTION);
297 
298  // ----------- S t a t e -------------
299  // -- eax: constructor result
300  // -- sp[0*kPointerSize]: implicit receiver
301  // -- sp[1*kPointerSize]: padding
302  // -- sp[2*kPointerSize]: constructor function
303  // -- sp[3*kPointerSize]: number of arguments
304  // -- sp[4*kPointerSize]: context
305  // -----------------------------------
306 
307  // Store offset of return address for deoptimizer.
308  masm->isolate()->heap()->SetConstructStubInvokeDeoptPCOffset(
309  masm->pc_offset());
310 
311  // Restore context from the frame.
312  __ mov(esi, Operand(ebp, ConstructFrameConstants::kContextOffset));
313 
314  // If the result is an object (in the ECMA sense), we should get rid
315  // of the receiver and use the result; see ECMA-262 section 13.2.2-7
316  // on page 74.
317  Label use_receiver, do_throw, leave_frame;
318 
319  // If the result is undefined, we jump out to using the implicit receiver.
320  __ JumpIfRoot(eax, RootIndex::kUndefinedValue, &use_receiver, Label::kNear);
321 
322  // Otherwise we do a smi check and fall through to check if the return value
323  // is a valid receiver.
324 
325  // If the result is a smi, it is *not* an object in the ECMA sense.
326  __ JumpIfSmi(eax, &use_receiver, Label::kNear);
327 
328  // If the type of the result (stored in its map) is less than
329  // FIRST_JS_RECEIVER_TYPE, it is not an object in the ECMA sense.
330  STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
331  __ CmpObjectType(eax, FIRST_JS_RECEIVER_TYPE, ecx);
332  __ j(above_equal, &leave_frame, Label::kNear);
333  __ jmp(&use_receiver, Label::kNear);
334 
  // do_throw is reached from use_receiver below when the on-stack receiver
  // is still the hole (derived constructor returned a non-object).
335  __ bind(&do_throw);
336  __ CallRuntime(Runtime::kThrowConstructorReturnedNonObject);
337 
338  // Throw away the result of the constructor invocation and use the
339  // on-stack receiver as the result.
340  __ bind(&use_receiver);
341  __ mov(eax, Operand(esp, 0 * kPointerSize));
342  __ JumpIfRoot(eax, RootIndex::kTheHoleValue, &do_throw);
343 
344  __ bind(&leave_frame);
345  // Restore smi-tagged arguments count from the frame.
346  __ mov(edx, Operand(ebp, ConstructFrameConstants::kLengthOffset));
347  // Leave construct frame.
348  }
349  // Remove caller arguments from the stack and return.
  // times_2 both un-smi-tags edx and scales by half a pointer; valid given
  // the smi layout asserted below.
350  STATIC_ASSERT(kSmiTagSize == 1 && kSmiTag == 0);
351  __ pop(ecx);
352  __ lea(esp, Operand(esp, edx, times_2, 1 * kPointerSize)); // 1 ~ receiver
353  __ push(ecx);
354  __ ret(0);
355 }
356 
// Thin wrapper: the builtins construct stub is fully implemented by the
// helper above.
357 void Builtins::Generate_JSBuiltinsConstructStub(MacroAssembler* masm) {
358  Generate_JSBuiltinsConstructStubHelper(masm);
359 }
360 
// Called when something that is not a constructor is used with |new|:
// throws a TypeError via the runtime (the runtime call does not return).
361 void Builtins::Generate_ConstructedNonConstructable(MacroAssembler* masm) {
362  FrameScope scope(masm, StackFrame::INTERNAL);
363  __ push(edi);
364  __ CallRuntime(Runtime::kThrowConstructedNonConstructable);
365 }
366 
367 
// Shared body of the JSEntry trampolines: reads function, receiver, argc,
// argv and new.target out of the C entry frame, copies the arguments
// (dereferencing handles), and invokes either Construct or Call depending
// on |is_construct|.
368 static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm,
369  bool is_construct) {
370  {
371  FrameScope scope(masm, StackFrame::INTERNAL);
372 
373  const Register scratch1 = edx;
374  const Register scratch2 = edi;
375 
376  // Setup the context (we need to use the caller context from the isolate).
377  ExternalReference context_address = ExternalReference::Create(
378  IsolateAddressId::kContextAddress, masm->isolate());
379  __ mov(esi, __ ExternalReferenceAsOperand(context_address, scratch1));
380 
381  // Load the previous frame pointer (edx) to access C arguments
382  __ mov(scratch1, Operand(ebp, 0));
383 
384  // Push the function and the receiver onto the stack.
385  __ push(Operand(scratch1, EntryFrameConstants::kFunctionArgOffset));
386  __ push(Operand(scratch1, EntryFrameConstants::kReceiverArgOffset));
387 
388  // Load the number of arguments and setup pointer to the arguments.
389  __ mov(eax, Operand(scratch1, EntryFrameConstants::kArgcOffset));
390  __ mov(scratch1, Operand(scratch1, EntryFrameConstants::kArgvOffset));
391 
392  // Check if we have enough stack space to push all arguments.
393  // Argument count in eax. Clobbers ecx.
394  Label enough_stack_space, stack_overflow;
395  Generate_StackOverflowCheck(masm, eax, ecx, &stack_overflow);
396  __ jmp(&enough_stack_space);
397 
398  __ bind(&stack_overflow);
399  __ CallRuntime(Runtime::kThrowStackOverflow);
400  // This should be unreachable.
401  __ int3();
402 
403  __ bind(&enough_stack_space);
404 
405  // Copy arguments to the stack in a loop.
  // Counts up from 0 to eax; argv entries are handles, hence the extra
  // dereference before each push.
406  Label loop, entry;
407  __ Move(ecx, Immediate(0));
408  __ jmp(&entry, Label::kNear);
409  __ bind(&loop);
410  // Push the parameter from argv.
411  __ mov(scratch2, Operand(scratch1, ecx, times_4, 0));
412  __ push(Operand(scratch2, 0)); // dereference handle
413  __ inc(ecx);
414  __ bind(&entry);
415  __ cmp(ecx, eax);
416  __ j(not_equal, &loop);
417 
418  // Load the previous frame pointer (edi) to access C arguments
419  __ mov(scratch2, Operand(ebp, 0));
420 
421  // Get the new.target and function from the frame.
422  __ mov(edx, Operand(scratch2, EntryFrameConstants::kNewTargetArgOffset));
423  __ mov(edi, Operand(scratch2, EntryFrameConstants::kFunctionArgOffset));
424 
425  // Invoke the code.
426  Handle<Code> builtin = is_construct
427  ? BUILTIN_CODE(masm->isolate(), Construct)
428  : masm->isolate()->builtins()->Call();
429  __ Call(builtin, RelocInfo::CODE_TARGET);
430 
431  // Exit the internal frame. Notice that this also removes the empty
432  // context and the function left on the stack by the code
433  // invocation.
434  }
435  __ ret(0);
436 }
437 
// Entry trampoline for ordinary (non-construct) JS calls from C++.
438 void Builtins::Generate_JSEntryTrampoline(MacroAssembler* masm) {
439  Generate_JSEntryTrampolineHelper(masm, false);
440 }
441 
// Entry trampoline for construct (|new|) JS calls from C++.
442 void Builtins::Generate_JSConstructEntryTrampoline(MacroAssembler* masm) {
443  Generate_JSEntryTrampolineHelper(masm, true);
444 }
445 
// If |sfi_data| holds an InterpreterData object, replaces it in place with
// the BytecodeArray stored inside it; otherwise |sfi_data| is left as-is.
// |scratch1| is clobbered by the type check.
446 static void GetSharedFunctionInfoBytecode(MacroAssembler* masm,
447  Register sfi_data,
448  Register scratch1) {
449  Label done;
450 
451  __ CmpObjectType(sfi_data, INTERPRETER_DATA_TYPE, scratch1);
452  __ j(not_equal, &done, Label::kNear);
453  __ mov(sfi_data,
454  FieldOperand(sfi_data, InterpreterData::kBytecodeArrayOffset));
455 
456  __ bind(&done);
457 }
458 
459 // static
460 void Builtins::Generate_ResumeGeneratorTrampoline(MacroAssembler* masm) {
461  // ----------- S t a t e -------------
462  // -- eax : the value to pass to the generator
463  // -- edx : the JSGeneratorObject to resume
464  // -- esp[0] : return address
465  // -----------------------------------
466  __ AssertGeneratorObject(edx);
467 
468  // Store input value into generator object.
469  __ mov(FieldOperand(edx, JSGeneratorObject::kInputOrDebugPosOffset), eax);
470  __ RecordWriteField(edx, JSGeneratorObject::kInputOrDebugPosOffset, eax, ecx,
471  kDontSaveFPRegs);
472 
473  // Load suspended function and context.
474  __ mov(edi, FieldOperand(edx, JSGeneratorObject::kFunctionOffset));
475  __ mov(esi, FieldOperand(edi, JSFunction::kContextOffset));
476 
477  // Flood function if we are stepping.
478  Label prepare_step_in_if_stepping, prepare_step_in_suspended_generator;
479  Label stepping_prepared;
480  ExternalReference debug_hook =
481  ExternalReference::debug_hook_on_function_call_address(masm->isolate());
482  __ cmpb(__ ExternalReferenceAsOperand(debug_hook, ecx), Immediate(0));
483  __ j(not_equal, &prepare_step_in_if_stepping);
484 
485  // Flood function if we need to continue stepping in the suspended generator.
486  ExternalReference debug_suspended_generator =
487  ExternalReference::debug_suspended_generator_address(masm->isolate());
488  __ cmp(edx, __ ExternalReferenceAsOperand(debug_suspended_generator, ecx));
489  __ j(equal, &prepare_step_in_suspended_generator);
490  __ bind(&stepping_prepared);
491 
492  // Check the stack for overflow. We are not trying to catch interruptions
493  // (i.e. debug break and preemption) here, so check the "real stack limit".
494  Label stack_overflow;
495  __ CompareRealStackLimit(esp);
496  __ j(below, &stack_overflow);
497 
498  // Pop return address.
499  __ PopReturnAddressTo(eax);
500 
501  // Push receiver.
502  __ Push(FieldOperand(edx, JSGeneratorObject::kReceiverOffset));
503 
504  // ----------- S t a t e -------------
505  // -- eax : return address
506  // -- edx : the JSGeneratorObject to resume
507  // -- edi : generator function
508  // -- esi : generator context
509  // -- esp[0] : generator receiver
510  // -----------------------------------
511 
512  {
513  __ movd(xmm0, ebx);
514 
515  // Copy the function arguments from the generator object's register file.
516  __ mov(ecx, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
517  __ movzx_w(ecx, FieldOperand(
518  ecx, SharedFunctionInfo::kFormalParameterCountOffset));
519  __ mov(ebx,
520  FieldOperand(edx, JSGeneratorObject::kParametersAndRegistersOffset));
521  {
522  Label done_loop, loop;
523  __ Set(edi, 0);
524 
525  __ bind(&loop);
526  __ cmp(edi, ecx);
527  __ j(greater_equal, &done_loop);
528  __ Push(
529  FieldOperand(ebx, edi, times_pointer_size, FixedArray::kHeaderSize));
530  __ add(edi, Immediate(1));
531  __ jmp(&loop);
532 
533  __ bind(&done_loop);
534  }
535 
536  // Restore registers.
537  __ mov(edi, FieldOperand(edx, JSGeneratorObject::kFunctionOffset));
538  __ movd(ebx, xmm0);
539  }
540 
541  // Underlying function needs to have bytecode available.
542  if (FLAG_debug_code) {
543  __ mov(ecx, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
544  __ mov(ecx, FieldOperand(ecx, SharedFunctionInfo::kFunctionDataOffset));
545  __ Push(eax);
546  GetSharedFunctionInfoBytecode(masm, ecx, eax);
547  __ Pop(eax);
548  __ CmpObjectType(ecx, BYTECODE_ARRAY_TYPE, ecx);
549  __ Assert(equal, AbortReason::kMissingBytecodeArray);
550  }
551 
552  // Resume (Ignition/TurboFan) generator object.
553  {
554  __ PushReturnAddressFrom(eax);
555  __ mov(eax, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
556  __ movzx_w(eax, FieldOperand(
557  eax, SharedFunctionInfo::kFormalParameterCountOffset));
558  // We abuse new.target both to indicate that this is a resume call and to
559  // pass in the generator object. In ordinary calls, new.target is always
560  // undefined because generator functions are non-constructable.
561  static_assert(kJavaScriptCallCodeStartRegister == ecx, "ABI mismatch");
562  __ mov(ecx, FieldOperand(edi, JSFunction::kCodeOffset));
563  __ add(ecx, Immediate(Code::kHeaderSize - kHeapObjectTag));
564  __ jmp(ecx);
565  }
566 
567  __ bind(&prepare_step_in_if_stepping);
568  {
569  FrameScope scope(masm, StackFrame::INTERNAL);
570  __ Push(edx);
571  __ Push(edi);
572  // Push hole as receiver since we do not use it for stepping.
573  __ PushRoot(RootIndex::kTheHoleValue);
574  __ CallRuntime(Runtime::kDebugOnFunctionCall);
575  __ Pop(edx);
576  __ mov(edi, FieldOperand(edx, JSGeneratorObject::kFunctionOffset));
577  }
578  __ jmp(&stepping_prepared);
579 
580  __ bind(&prepare_step_in_suspended_generator);
581  {
582  FrameScope scope(masm, StackFrame::INTERNAL);
583  __ Push(edx);
584  __ CallRuntime(Runtime::kDebugPrepareStepInSuspendedGenerator);
585  __ Pop(edx);
586  __ mov(edi, FieldOperand(edx, JSGeneratorObject::kFunctionOffset));
587  }
588  __ jmp(&stepping_prepared);
589 
590  __ bind(&stack_overflow);
591  {
592  FrameScope scope(masm, StackFrame::INTERNAL);
593  __ CallRuntime(Runtime::kThrowStackOverflow);
594  __ int3(); // This should be unreachable.
595  }
596 }
597 
// Installs |optimized_code| as the code of |closure| and records the write
// barrier for the store. Both scratch registers are clobbered.
598 static void ReplaceClosureCodeWithOptimizedCode(MacroAssembler* masm,
599  Register optimized_code,
600  Register closure,
601  Register scratch1,
602  Register scratch2) {
603  // Store the optimized code in the closure.
604  __ mov(FieldOperand(closure, JSFunction::kCodeOffset), optimized_code);
605  __ mov(scratch1, optimized_code); // Write barrier clobbers scratch1 below.
606  __ RecordWriteField(closure, JSFunction::kCodeOffset, scratch1, scratch2,
607  kDontSaveFPRegs, OMIT_REMEMBERED_SET, OMIT_SMI_CHECK);
608 }
609 
// Tears down the interpreter frame and drops the receiver plus arguments
// from the caller's stack, preserving the return address. The parameter
// size is read from the frame's BytecodeArray. Both scratch registers are
// clobbered.
610 static void LeaveInterpreterFrame(MacroAssembler* masm, Register scratch1,
611  Register scratch2) {
612  Register args_count = scratch1;
613  Register return_pc = scratch2;
614 
615  // Get the arguments + receiver count.
616  __ mov(args_count,
617  Operand(ebp, InterpreterFrameConstants::kBytecodeArrayFromFp));
618  __ mov(args_count,
619  FieldOperand(args_count, BytecodeArray::kParameterSizeOffset));
620 
621  // Leave the frame (also dropping the register file).
622  __ leave();
623 
624  // Drop receiver + arguments.
625  __ pop(return_pc);
626  __ add(esp, args_count);
627  __ push(return_pc);
628 }
629 
630 // Tail-call |function_id| if |smi_entry| == |marker|
// Compares the smi in |smi_entry| against |marker|; on a match, tail-calls
// the runtime function |function_id| (does not return here). Otherwise
// falls through.
631 static void TailCallRuntimeIfMarkerEquals(MacroAssembler* masm,
632  Register smi_entry,
633  OptimizationMarker marker,
634  Runtime::FunctionId function_id) {
635  Label no_match;
636  __ cmp(smi_entry, Immediate(Smi::FromEnum(marker)));
637  __ j(not_equal, &no_match, Label::kNear);
638  GenerateTailCallToReturnedCode(masm, function_id);
639  __ bind(&no_match);
640 }
641 
// Inspects the feedback vector's optimized-code slot. If it holds an
// optimization marker (smi), dispatches to the matching runtime function;
// if it holds a live weak reference to non-deoptimized code, installs that
// code on the closure and tail-calls it. Falls through when the slot is
// clear or contains only the kNone / kInOptimizationQueue markers.
642 static void MaybeTailCallOptimizedCodeSlot(MacroAssembler* masm,
643  Register scratch) {
644  // ----------- S t a t e -------------
645  // -- eax : argument count (preserved for callee if needed, and caller)
646  // -- edx : new target (preserved for callee if needed, and caller)
647  // -- edi : target function (preserved for callee if needed, and caller)
648  // -- ecx : feedback vector (also used as scratch, value is not preserved)
649  // -----------------------------------
650  DCHECK(!AreAliased(eax, edx, edi, scratch));
651 
652  Label optimized_code_slot_is_weak_ref, fallthrough;
653 
654  Register closure = edi;
655  // Scratch contains feedback_vector.
656  Register feedback_vector = scratch;
657 
658  // Load the optimized code from the feedback vector and re-use the register.
659  Register optimized_code_entry = scratch;
660  __ mov(optimized_code_entry,
661  FieldOperand(feedback_vector, FeedbackVector::kOptimizedCodeOffset));
662 
663  // Check if the code entry is a Smi. If yes, we interpret it as an
664  // optimisation marker. Otherwise, interpret it as a weak reference to a code
665  // object.
666  __ JumpIfNotSmi(optimized_code_entry, &optimized_code_slot_is_weak_ref);
667 
668  {
669  // Optimized code slot is an optimization marker.
670 
671  // Fall through if no optimization trigger.
672  __ cmp(optimized_code_entry,
673  Immediate(Smi::FromEnum(OptimizationMarker::kNone)));
674  __ j(equal, &fallthrough);
675 
676  // TODO(v8:8394): The logging of first execution will break if
677  // feedback vectors are not allocated. We need to find a different way of
678  // logging these events if required.
679  TailCallRuntimeIfMarkerEquals(masm, optimized_code_entry,
680  OptimizationMarker::kLogFirstExecution,
681  Runtime::kFunctionFirstExecution);
682  TailCallRuntimeIfMarkerEquals(masm, optimized_code_entry,
683  OptimizationMarker::kCompileOptimized,
684  Runtime::kCompileOptimized_NotConcurrent);
685  TailCallRuntimeIfMarkerEquals(
686  masm, optimized_code_entry,
687  OptimizationMarker::kCompileOptimizedConcurrent,
688  Runtime::kCompileOptimized_Concurrent);
689 
690  {
691  // Otherwise, the marker is InOptimizationQueue, so fall through hoping
692  // that an interrupt will eventually update the slot with optimized code.
693  if (FLAG_debug_code) {
694  __ cmp(
695  optimized_code_entry,
696  Immediate(Smi::FromEnum(OptimizationMarker::kInOptimizationQueue)));
697  __ Assert(equal, AbortReason::kExpectedOptimizationSentinel);
698  }
699  __ jmp(&fallthrough);
700  }
701  }
702 
703  {
704  // Optimized code slot is a weak reference.
705  __ bind(&optimized_code_slot_is_weak_ref);
706 
  // If the weak reference is cleared, there is nothing to run.
707  __ LoadWeakValue(optimized_code_entry, &fallthrough);
708 
  // Save eax/edx so they can be used as scratch for the deopt check and
  // the closure update, then restored before the jump.
709  __ push(eax);
710  __ push(edx);
711 
712  // Check if the optimized code is marked for deopt. If it is, bailout to a
713  // given label.
714  Label found_deoptimized_code;
715  __ mov(eax,
716  FieldOperand(optimized_code_entry, Code::kCodeDataContainerOffset));
717  __ test(FieldOperand(eax, CodeDataContainer::kKindSpecificFlagsOffset),
718  Immediate(1 << Code::kMarkedForDeoptimizationBit));
719  __ j(not_zero, &found_deoptimized_code);
720 
721  // Optimized code is good, get it into the closure and link the closure into
722  // the optimized functions list, then tail call the optimized code.
723  ReplaceClosureCodeWithOptimizedCode(masm, optimized_code_entry, closure,
724  edx, eax);
725  static_assert(kJavaScriptCallCodeStartRegister == ecx, "ABI mismatch");
726  __ Move(ecx, optimized_code_entry);
727  __ add(ecx, Immediate(Code::kHeaderSize - kHeapObjectTag));
728  __ pop(edx);
729  __ pop(eax);
730  __ jmp(ecx);
731 
732  // Optimized code slot contains deoptimized code, evict it and re-enter the
733  // closure's code.
734  __ bind(&found_deoptimized_code);
735  __ pop(edx);
736  __ pop(eax);
737  GenerateTailCallToReturnedCode(masm, Runtime::kEvictOptimizedCodeSlot);
738  }
739 
740  // Fall-through if the optimized code cell is clear and there is no
741  // optimization marker.
742  __ bind(&fallthrough);
743 }
744 
745 // Advance the current bytecode offset. This simulates what all bytecode
746 // handlers do upon completion of the underlying operation. Will bail out to a
747 // label if the bytecode (without prefix) is a return bytecode.
748 static void AdvanceBytecodeOffsetOrReturn(MacroAssembler* masm,
749  Register bytecode_array,
750  Register bytecode_offset,
751  Register scratch1, Register scratch2,
752  Label* if_return) {
753  Register bytecode_size_table = scratch1;
754  Register bytecode = scratch2;
755  DCHECK(!AreAliased(bytecode_array, bytecode_offset, bytecode_size_table,
756  bytecode));
757  __ Move(bytecode_size_table,
758  Immediate(ExternalReference::bytecode_size_table_address()));
759 
760  // Load the current bytecode.
  // NOTE(review): this first load addresses via the dedicated interpreter
  // registers rather than the |bytecode_array|/|bytecode_offset| parameters
  // (which the later loads use) — presumably the parameters alias those
  // registers at every call site; confirm before reusing this helper with
  // other registers.
761  __ movzx_b(bytecode, Operand(kInterpreterBytecodeArrayRegister,
762  kInterpreterBytecodeOffsetRegister, times_1, 0));
763 
764  // Check if the bytecode is a Wide or ExtraWide prefix bytecode.
  // Prefix opcodes are 0..3 by construction (asserted below): even values
  // are Wide variants, odd values ExtraWide variants.
765  Label process_bytecode, extra_wide;
766  STATIC_ASSERT(0 == static_cast<int>(interpreter::Bytecode::kWide));
767  STATIC_ASSERT(1 == static_cast<int>(interpreter::Bytecode::kExtraWide));
768  STATIC_ASSERT(2 == static_cast<int>(interpreter::Bytecode::kDebugBreakWide));
769  STATIC_ASSERT(3 ==
770  static_cast<int>(interpreter::Bytecode::kDebugBreakExtraWide));
771  __ cmp(bytecode, Immediate(0x3));
772  __ j(above, &process_bytecode, Label::kNear);
773  __ test(bytecode, Immediate(0x1));
774  __ j(not_equal, &extra_wide, Label::kNear);
775 
776  // Load the next bytecode and update table to the wide scaled table.
777  __ inc(bytecode_offset);
778  __ movzx_b(bytecode, Operand(bytecode_array, bytecode_offset, times_1, 0));
779  __ add(bytecode_size_table,
780  Immediate(kIntSize * interpreter::Bytecodes::kBytecodeCount));
781  __ jmp(&process_bytecode, Label::kNear);
782 
783  __ bind(&extra_wide);
784  // Load the next bytecode and update table to the extra wide scaled table.
785  __ inc(bytecode_offset);
786  __ movzx_b(bytecode, Operand(bytecode_array, bytecode_offset, times_1, 0));
787  __ add(bytecode_size_table,
788  Immediate(2 * kIntSize * interpreter::Bytecodes::kBytecodeCount));
789 
790  __ bind(&process_bytecode);
791 
792 // Bailout to the return label if this is a return bytecode.
793 #define JUMP_IF_EQUAL(NAME) \
794  __ cmp(bytecode, \
795  Immediate(static_cast<int>(interpreter::Bytecode::k##NAME))); \
796  __ j(equal, if_return);
797  RETURN_BYTECODE_LIST(JUMP_IF_EQUAL)
798 #undef JUMP_IF_EQUAL
799 
800  // Otherwise, load the size of the current bytecode and advance the offset.
801  __ add(bytecode_offset, Operand(bytecode_size_table, bytecode, times_4, 0));
802 }
803 
// Generate code for entering a JS function with the interpreter.
// On entry to the function the receiver and arguments have been pushed on the
// stack left to right. The actual argument count matches the formal parameter
// count expected by the function.
//
// The live registers are:
//   o edi: the JS function object being called
//   o edx: the incoming new target or generator object
//   o esi: our context
//   o ebp: the caller's frame pointer
//   o esp: stack pointer (pointing to return address)
//
// The function builds an interpreter frame. See InterpreterFrameConstants in
// frames.h for its layout.
void Builtins::Generate_InterpreterEntryTrampoline(MacroAssembler* masm) {
  Register closure = edi;

  Register feedback_vector = ecx;
  Label push_stack_frame;
  // Load feedback vector and check if it is valid. If valid, check for
  // optimized code and update invocation count. Otherwise, setup the stack
  // frame.
  __ mov(feedback_vector,
         FieldOperand(closure, JSFunction::kFeedbackCellOffset));
  __ mov(feedback_vector, FieldOperand(feedback_vector, Cell::kValueOffset));
  __ JumpIfRoot(feedback_vector, RootIndex::kUndefinedValue, &push_stack_frame);

  // Read off the optimized code slot in the closure's feedback vector, and if
  // there is optimized code or an optimization marker, call that instead.
  // Note: ecx is handed over as a scratch register here, which is why the
  // feedback vector is re-loaded below.
  MaybeTailCallOptimizedCodeSlot(masm, ecx);

  // Load the feedback vector and increment the invocation count.
  __ mov(feedback_vector,
         FieldOperand(closure, JSFunction::kFeedbackCellOffset));
  __ mov(feedback_vector, FieldOperand(feedback_vector, Cell::kValueOffset));
  __ inc(FieldOperand(feedback_vector, FeedbackVector::kInvocationCountOffset));

  // Open a frame scope to indicate that there is a frame on the stack. The
  // MANUAL indicates that the scope shouldn't actually generate code to set
  // up the frame (that is done below).
  __ bind(&push_stack_frame);
  FrameScope frame_scope(masm, StackFrame::MANUAL);
  __ push(ebp);  // Caller's frame pointer.
  __ mov(ebp, esp);
  __ push(esi);  // Callee's context.
  __ push(edi);  // Callee's JS function.

  // Get the bytecode array from the function object and load it into
  // kInterpreterBytecodeArrayRegister.
  __ mov(eax, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
  __ mov(kInterpreterBytecodeArrayRegister,
         FieldOperand(eax, SharedFunctionInfo::kFunctionDataOffset));
  // Preserve eax across the helper call below, which receives it as a
  // scratch register.
  __ Push(eax);
  GetSharedFunctionInfoBytecode(masm, kInterpreterBytecodeArrayRegister, eax);
  __ Pop(eax);

  // Check function data field is actually a BytecodeArray object.
  if (FLAG_debug_code) {
    __ AssertNotSmi(kInterpreterBytecodeArrayRegister);
    __ CmpObjectType(kInterpreterBytecodeArrayRegister, BYTECODE_ARRAY_TYPE,
                     eax);
    __ Assert(
        equal,
        AbortReason::kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry);
  }

  // Reset code age.
  __ mov_b(FieldOperand(kInterpreterBytecodeArrayRegister,
                        BytecodeArray::kBytecodeAgeOffset),
           Immediate(BytecodeArray::kNoAgeBytecodeAge));

  // Push bytecode array.
  __ push(kInterpreterBytecodeArrayRegister);
  // Push Smi tagged initial bytecode array offset.
  __ push(Immediate(Smi::FromInt(BytecodeArray::kHeaderSize - kHeapObjectTag)));

  // Allocate the local and temporary register file on the stack.
  {
    // Load frame size from the BytecodeArray object.
    Register frame_size = ecx;
    __ mov(frame_size, FieldOperand(kInterpreterBytecodeArrayRegister,
                                    BytecodeArray::kFrameSizeOffset));

    // Do a stack check to ensure we don't go over the limit.
    Label ok;
    __ mov(eax, esp);
    __ sub(eax, frame_size);
    __ CompareRealStackLimit(eax);
    __ j(above_equal, &ok);
    __ CallRuntime(Runtime::kThrowStackOverflow);
    __ bind(&ok);

    // If ok, push undefined as the initial value for all register file
    // entries.
    Label loop_header;
    Label loop_check;
    __ Move(eax, masm->isolate()->factory()->undefined_value());
    __ jmp(&loop_check);
    __ bind(&loop_header);
    // TODO(rmcilroy): Consider doing more than one push per loop iteration.
    __ push(eax);
    // Continue loop if not done.
    __ bind(&loop_check);
    __ sub(frame_size, Immediate(kPointerSize));
    __ j(greater_equal, &loop_header);
  }

  // If the bytecode array has a valid incoming new target or generator object
  // register, initialize it with incoming value which was passed in edx.
  // A zero register index means "no such register".
  Label no_incoming_new_target_or_generator_register;
  __ mov(eax, FieldOperand(
                  kInterpreterBytecodeArrayRegister,
                  BytecodeArray::kIncomingNewTargetOrGeneratorRegisterOffset));
  __ test(eax, eax);
  __ j(zero, &no_incoming_new_target_or_generator_register);
  __ mov(Operand(ebp, eax, times_pointer_size, 0), edx);
  __ bind(&no_incoming_new_target_or_generator_register);

  // Load accumulator and bytecode offset into registers.
  __ LoadRoot(kInterpreterAccumulatorRegister, RootIndex::kUndefinedValue);
  __ mov(kInterpreterBytecodeOffsetRegister,
         Immediate(BytecodeArray::kHeaderSize - kHeapObjectTag));

  // Load the dispatch table into a register and dispatch to the bytecode
  // handler at the current bytecode offset.
  Label do_dispatch;
  __ bind(&do_dispatch);
  __ Move(kInterpreterDispatchTableRegister,
          Immediate(ExternalReference::interpreter_dispatch_table_address(
              masm->isolate())));
  __ movzx_b(ecx, Operand(kInterpreterBytecodeArrayRegister,
                          kInterpreterBytecodeOffsetRegister, times_1, 0));
  __ mov(
      kJavaScriptCallCodeStartRegister,
      Operand(kInterpreterDispatchTableRegister, ecx, times_pointer_size, 0));
  __ call(kJavaScriptCallCodeStartRegister);
  // Record the pc offset that bytecode handlers return to; it is read back by
  // Generate_InterpreterEnterBytecode to synthesize a return address into
  // this trampoline.
  masm->isolate()->heap()->SetInterpreterEntryReturnPCOffset(masm->pc_offset());

  // Any returns to the entry trampoline are either due to the return bytecode
  // or the interpreter tail calling a builtin and then a dispatch.

  // Get bytecode array and bytecode offset from the stack frame.
  __ mov(kInterpreterBytecodeArrayRegister,
         Operand(ebp, InterpreterFrameConstants::kBytecodeArrayFromFp));
  __ mov(kInterpreterBytecodeOffsetRegister,
         Operand(ebp, InterpreterFrameConstants::kBytecodeOffsetFromFp));
  __ SmiUntag(kInterpreterBytecodeOffsetRegister);

  // Either return, or advance to the next bytecode and dispatch.
  Label do_return;
  AdvanceBytecodeOffsetOrReturn(masm, kInterpreterBytecodeArrayRegister,
                                kInterpreterBytecodeOffsetRegister, ecx,
                                kInterpreterDispatchTableRegister, &do_return);
  __ jmp(&do_dispatch);

  __ bind(&do_return);
  // The return value is in eax.
  LeaveInterpreterFrame(masm, edx, ecx);
  __ ret(0);
}
963 
964 
// Pushes the arguments in [array_limit + kPointerSize, start_address] onto
// the stack, walking downwards in memory so that the last argument ends up
// deepest on the stack. Clobbers start_address.
static void Generate_InterpreterPushArgs(MacroAssembler* masm,
                                         Register array_limit,
                                         Register start_address) {
  // ----------- S t a t e -------------
  //  -- start_address : Pointer to the last argument in the args array.
  //  -- array_limit : Pointer to one before the first argument in the
  //                   args array.
  // -----------------------------------
  Label loop_header, loop_check;
  __ jmp(&loop_check);
  __ bind(&loop_header);
  __ Push(Operand(start_address, 0));
  __ sub(start_address, Immediate(kPointerSize));  // Step to next argument.
  __ bind(&loop_check);
  // Keep pushing while start_address is still above array_limit.
  __ cmp(start_address, array_limit);
  __ j(greater, &loop_header, Label::kNear);
}
982 
// static
void Builtins::Generate_InterpreterPushArgsThenCallImpl(
    MacroAssembler* masm, ConvertReceiverMode receiver_mode,
    InterpreterPushArgsMode mode) {
  DCHECK(mode != InterpreterPushArgsMode::kArrayFunction);
  // ----------- S t a t e -------------
  //  -- eax : the number of arguments (not including the receiver)
  //  -- ecx : the address of the first argument to be pushed. Subsequent
  //           arguments should be consecutive above this, in the same order as
  //           they are to be pushed onto the stack.
  //  -- edi : the target to call (can be any Object).
  // -----------------------------------

  const Register scratch = edx;
  const Register argv = ecx;

  Label stack_overflow;
  // Add a stack check before pushing the arguments.
  Generate_StackOverflowCheck(masm, eax, scratch, &stack_overflow, true);

  __ movd(xmm0, eax);  // Spill number of arguments.

  // Compute the expected number of arguments.
  __ mov(scratch, eax);
  __ add(scratch, Immediate(1));  // Add one for receiver.

  // Pop return address to allow tail-call after pushing arguments.
  __ PopReturnAddressTo(eax);

  // Push "undefined" as the receiver arg if we need to.
  if (receiver_mode == ConvertReceiverMode::kNullOrUndefined) {
    __ PushRoot(RootIndex::kUndefinedValue);
    // The receiver is already on the stack now, so only the arguments
    // themselves remain to be copied below.
    __ sub(scratch, Immediate(1));  // Subtract one for receiver.
  }

  // Find the address of the last argument.
  // scratch = argv - scratch * kPointerSize: one slot below the last value
  // still to be pushed, i.e. the array_limit for Generate_InterpreterPushArgs.
  __ shl(scratch, kPointerSizeLog2);
  __ neg(scratch);
  __ add(scratch, argv);
  Generate_InterpreterPushArgs(masm, scratch, argv);

  // Call the target.

  if (mode == InterpreterPushArgsMode::kWithFinalSpread) {
    __ Pop(ecx);  // Pass the spread in a register
    __ PushReturnAddressFrom(eax);
    __ movd(eax, xmm0);  // Restore number of arguments.
    __ sub(eax, Immediate(1));  // Subtract one for spread
    __ Jump(BUILTIN_CODE(masm->isolate(), CallWithSpread),
            RelocInfo::CODE_TARGET);
  } else {
    __ PushReturnAddressFrom(eax);
    __ movd(eax, xmm0);  // Restore number of arguments.
    __ Jump(masm->isolate()->builtins()->Call(ConvertReceiverMode::kAny),
            RelocInfo::CODE_TARGET);
  }

  __ bind(&stack_overflow);
  {
    __ TailCallRuntime(Runtime::kThrowStackOverflow);

    // This should be unreachable.
    __ int3();
  }
}
1048 
namespace {

// This function modifies start_addr, and only reads the contents of num_args
// register. scratch1 and scratch2 are used as temporary registers.
// num_slots_to_move is the number of stack slots (return address plus any
// descriptor stack arguments) that must be relocated above the copied args.
// Jumps to stack_overflow (without pushing anything) if the required space
// is not available.
void Generate_InterpreterPushZeroAndArgsAndReturnAddress(
    MacroAssembler* masm, Register num_args, Register start_addr,
    Register scratch1, Register scratch2, int num_slots_to_move,
    Label* stack_overflow) {
  // We have to move return address and the temporary registers above it
  // before we can copy arguments onto the stack. To achieve this:
  // Step 1: Increment the stack pointer by num_args + 1 (for receiver).
  // Step 2: Move the return address and values around it to the top of stack.
  // Step 3: Copy the arguments into the correct locations.
  //  current stack    =====>    required stack layout
  // |             |            | return addr   | (2) <-- esp (1)
  // |             |            | addtl. slot   |
  // |             |            | arg N         | (3)
  // |             |            | ....          |
  // |             |            | arg 1         |
  // | return addr | <-- esp    | arg 0         |
  // | addtl. slot |            | receiver slot |

  // Check for stack overflow before we increment the stack pointer.
  Generate_StackOverflowCheck(masm, num_args, scratch1, stack_overflow, true);

  // Step 1 - Update the stack pointer.

  __ lea(scratch1, Operand(num_args, times_4, kPointerSize));
  __ AllocateStackFrame(scratch1);

  // Step 2 move return_address and slots around it to the correct locations.
  // Move from top to bottom, otherwise we may overwrite when num_args = 0 or 1,
  // basically when the source and destination overlap. We at least need one
  // extra slot for receiver, so no extra checks are required to avoid copy.
  for (int i = 0; i < num_slots_to_move + 1; i++) {
    __ mov(scratch1,
           Operand(esp, num_args, times_pointer_size, (i + 1) * kPointerSize));
    __ mov(Operand(esp, i * kPointerSize), scratch1);
  }

  // Step 3 copy arguments to correct locations.
  // Slot meant for receiver contains return address. Reset it so that
  // we will not incorrectly interpret return address as an object.
  __ mov(Operand(esp, num_args, times_pointer_size,
                 (num_slots_to_move + 1) * kPointerSize),
         Immediate(0));
  // scratch1 counts down from num_args to 1, indexing the destination slot.
  __ mov(scratch1, num_args);

  Label loop_header, loop_check;
  __ jmp(&loop_check);
  __ bind(&loop_header);
  __ mov(scratch2, Operand(start_addr, 0));
  __ mov(Operand(esp, scratch1, times_pointer_size,
                 num_slots_to_move * kPointerSize),
         scratch2);
  __ sub(start_addr, Immediate(kPointerSize));
  __ sub(scratch1, Immediate(1));
  __ bind(&loop_check);
  __ cmp(scratch1, Immediate(0));
  __ j(greater, &loop_header, Label::kNear);
}

}  // end anonymous namespace
1112 
// static
void Builtins::Generate_InterpreterPushArgsThenConstructImpl(
    MacroAssembler* masm, InterpreterPushArgsMode mode) {
  // ----------- S t a t e -------------
  //  -- eax     : the number of arguments (not including the receiver)
  //  -- ecx     : the address of the first argument to be pushed. Subsequent
  //               arguments should be consecutive above this, in the same order
  //               as they are to be pushed onto the stack.
  //  -- esp[0]  : return address
  //  -- esp[4]  : allocation site feedback (if available or undefined)
  //  -- esp[8]  : the new target
  //  -- esp[12] : the constructor
  // -----------------------------------

  Label stack_overflow;

  // Push arguments and move return address and stack spill slots to the top of
  // stack. The eax register is readonly. The ecx register will be modified. edx
  // and edi are used as scratch registers.
  Generate_InterpreterPushZeroAndArgsAndReturnAddress(
      masm, eax, ecx, edx, edi,
      InterpreterPushArgsThenConstructDescriptor::kStackArgumentsCount,
      &stack_overflow);

  // Call the appropriate constructor. eax and ecx already contain intended
  // values, remaining registers still need to be initialized from the stack.

  if (mode == InterpreterPushArgsMode::kArrayFunction) {
    // Tail call to the array construct stub (still in the caller context at
    // this point).

    __ movd(xmm0, eax);  // Spill number of arguments.
    __ PopReturnAddressTo(eax);
    __ Pop(kJavaScriptCallExtraArg1Register);   // Allocation site feedback.
    __ Pop(kJavaScriptCallNewTargetRegister);
    __ Pop(kJavaScriptCallTargetRegister);
    __ PushReturnAddressFrom(eax);

    __ AssertFunction(kJavaScriptCallTargetRegister);
    __ AssertUndefinedOrAllocationSite(kJavaScriptCallExtraArg1Register, eax);

    __ movd(eax, xmm0);  // Reload number of arguments.
    __ Jump(BUILTIN_CODE(masm->isolate(), ArrayConstructorImpl),
            RelocInfo::CODE_TARGET);
  } else if (mode == InterpreterPushArgsMode::kWithFinalSpread) {
    __ movd(xmm0, eax);  // Spill number of arguments.
    __ PopReturnAddressTo(eax);
    __ Drop(1);  // The allocation site is unused.
    __ Pop(kJavaScriptCallNewTargetRegister);
    __ Pop(kJavaScriptCallTargetRegister);
    __ Pop(ecx);  // Pop the spread (i.e. the first argument), overwriting ecx.
    __ PushReturnAddressFrom(eax);
    __ movd(eax, xmm0);  // Reload number of arguments.
    __ sub(eax, Immediate(1));  // The actual argc thus decrements by one.

    __ Jump(BUILTIN_CODE(masm->isolate(), ConstructWithSpread),
            RelocInfo::CODE_TARGET);
  } else {
    DCHECK_EQ(InterpreterPushArgsMode::kOther, mode);
    __ PopReturnAddressTo(ecx);
    __ Drop(1);  // The allocation site is unused.
    __ Pop(kJavaScriptCallNewTargetRegister);
    __ Pop(kJavaScriptCallTargetRegister);
    __ PushReturnAddressFrom(ecx);

    __ Jump(BUILTIN_CODE(masm->isolate(), Construct), RelocInfo::CODE_TARGET);
  }

  __ bind(&stack_overflow);
  __ TailCallRuntime(Runtime::kThrowStackOverflow);
  // This should be unreachable.
  __ int3();
}
1185 
// Re-enters the interpreter at the bytecode offset stored in the current
// interpreter frame: pushes a return address pointing into the interpreter
// entry trampoline, then dispatches to the handler for the target bytecode.
static void Generate_InterpreterEnterBytecode(MacroAssembler* masm) {
  // Set the return address to the correct point in the interpreter entry
  // trampoline.
  Label builtin_trampoline, trampoline_loaded;
  Smi interpreter_entry_return_pc_offset(
      masm->isolate()->heap()->interpreter_entry_return_pc_offset());
  DCHECK_NE(interpreter_entry_return_pc_offset, Smi::kZero);

  static constexpr Register scratch = ecx;

  // If the SFI function_data is an InterpreterData, the function will have a
  // custom copy of the interpreter entry trampoline for profiling. If so,
  // get the custom trampoline, otherwise grab the entry address of the global
  // trampoline.
  __ mov(scratch, Operand(ebp, StandardFrameConstants::kFunctionOffset));
  __ mov(scratch, FieldOperand(scratch, JSFunction::kSharedFunctionInfoOffset));
  __ mov(scratch,
         FieldOperand(scratch, SharedFunctionInfo::kFunctionDataOffset));
  // Preserve eax (the accumulator slot) around the type check, which uses it
  // as a scratch register.
  __ Push(eax);
  __ CmpObjectType(scratch, INTERPRETER_DATA_TYPE, eax);
  __ j(not_equal, &builtin_trampoline, Label::kNear);

  // Custom trampoline: convert its tagged Code pointer to the entry address.
  __ mov(scratch,
         FieldOperand(scratch, InterpreterData::kInterpreterTrampolineOffset));
  __ add(scratch, Immediate(Code::kHeaderSize - kHeapObjectTag));
  __ jmp(&trampoline_loaded, Label::kNear);

  __ bind(&builtin_trampoline);
  __ mov(scratch,
         __ ExternalReferenceAsOperand(
             ExternalReference::
                 address_of_interpreter_entry_trampoline_instruction_start(
                     masm->isolate()),
             scratch));

  __ bind(&trampoline_loaded);
  __ Pop(eax);
  // Point the return address at the instruction following the trampoline's
  // bytecode-handler call (see SetInterpreterEntryReturnPCOffset).
  __ add(scratch, Immediate(interpreter_entry_return_pc_offset->value()));
  __ push(scratch);

  // Initialize the dispatch table register.
  __ Move(kInterpreterDispatchTableRegister,
          Immediate(ExternalReference::interpreter_dispatch_table_address(
              masm->isolate())));

  // Get the bytecode array pointer from the frame.
  __ mov(kInterpreterBytecodeArrayRegister,
         Operand(ebp, InterpreterFrameConstants::kBytecodeArrayFromFp));

  if (FLAG_debug_code) {
    // Check function data field is actually a BytecodeArray object.
    __ AssertNotSmi(kInterpreterBytecodeArrayRegister);
    __ CmpObjectType(kInterpreterBytecodeArrayRegister, BYTECODE_ARRAY_TYPE,
                     scratch);
    __ Assert(
        equal,
        AbortReason::kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry);
  }

  // Get the target bytecode offset from the frame.
  __ mov(kInterpreterBytecodeOffsetRegister,
         Operand(ebp, InterpreterFrameConstants::kBytecodeOffsetFromFp));
  __ SmiUntag(kInterpreterBytecodeOffsetRegister);

  // Dispatch to the target bytecode.
  __ movzx_b(scratch, Operand(kInterpreterBytecodeArrayRegister,
                              kInterpreterBytecodeOffsetRegister, times_1, 0));
  __ mov(kJavaScriptCallCodeStartRegister,
         Operand(kInterpreterDispatchTableRegister, scratch, times_pointer_size,
                 0));
  __ jmp(kJavaScriptCallCodeStartRegister);
}
1258 
// Advances the saved bytecode offset in the interpreter frame to the next
// bytecode, then re-enters the interpreter there.
void Builtins::Generate_InterpreterEnterBytecodeAdvance(MacroAssembler* masm) {
  // Get bytecode array and bytecode offset from the stack frame.
  __ mov(kInterpreterBytecodeArrayRegister,
         Operand(ebp, InterpreterFrameConstants::kBytecodeArrayFromFp));
  __ mov(kInterpreterBytecodeOffsetRegister,
         Operand(ebp, InterpreterFrameConstants::kBytecodeOffsetFromFp));
  __ SmiUntag(kInterpreterBytecodeOffsetRegister);

  // Advance to the next bytecode.
  Label if_return;
  AdvanceBytecodeOffsetOrReturn(masm, kInterpreterBytecodeArrayRegister,
                                kInterpreterBytecodeOffsetRegister, ecx, esi,
                                &if_return);

  // Convert new bytecode offset to a Smi and save in the stackframe.
  __ mov(ecx, kInterpreterBytecodeOffsetRegister);
  __ SmiTag(ecx);
  __ mov(Operand(ebp, InterpreterFrameConstants::kBytecodeOffsetFromFp), ecx);

  Generate_InterpreterEnterBytecode(masm);

  // We should never take the if_return path: advancing past a return bytecode
  // is an interpreter bug.
  __ bind(&if_return);
  __ Abort(AbortReason::kInvalidBytecodeAdvance);
}
1284 
// Re-enters the interpreter at the bytecode offset currently stored in the
// frame, without advancing it first.
void Builtins::Generate_InterpreterEnterBytecodeDispatch(MacroAssembler* masm) {
  Generate_InterpreterEnterBytecode(masm);
}
1288 
// Tries to instantiate an asm.js module via Runtime::kInstantiateAsmJs.
// On success, returns the instantiation result to the caller; on failure,
// falls back to calling the function's (reset) code object as regular JS.
void Builtins::Generate_InstantiateAsmJs(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- eax : argument count (preserved for callee)
  //  -- edx : new target (preserved for callee)
  //  -- edi : target function (preserved for callee)
  // -----------------------------------
  Label failed;
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    // Preserve argument count for later compare.
    __ mov(ecx, eax);
    // Push the number of arguments to the callee.
    __ SmiTag(eax);
    __ push(eax);
    // Push a copy of the target function and the new target.
    __ push(edi);
    __ push(edx);

    // The function.
    __ push(edi);
    // Copy arguments from caller (stdlib, foreign, heap).
    // Each unrolled case j (0..3) handles argc == j: push the caller's j
    // arguments, then pad with undefined to exactly three runtime arguments.
    Label args_done;
    for (int j = 0; j < 4; ++j) {
      Label over;
      if (j < 3) {
        __ cmp(ecx, Immediate(j));
        __ j(not_equal, &over, Label::kNear);
      }
      for (int i = j - 1; i >= 0; --i) {
        __ Push(Operand(
            ebp, StandardFrameConstants::kCallerSPOffset + i * kPointerSize));
      }
      for (int i = 0; i < 3 - j; ++i) {
        __ PushRoot(RootIndex::kUndefinedValue);
      }
      if (j < 3) {
        __ jmp(&args_done, Label::kNear);
        __ bind(&over);
      }
    }
    __ bind(&args_done);

    // Call runtime, on success unwind frame, and parent frame.
    __ CallRuntime(Runtime::kInstantiateAsmJs, 4);
    // A smi 0 is returned on failure, an object on success.
    __ JumpIfSmi(eax, &failed, Label::kNear);

    __ Drop(2);   // Drop the saved copies of new target and target function.
    __ Pop(ecx);  // Pop the Smi-tagged argument count.
    __ SmiUntag(ecx);
    scope.GenerateLeaveFrame();

    // Remove the receiver and arguments from the caller's stack, preserving
    // the return address.
    __ PopReturnAddressTo(edx);
    __ inc(ecx);  // Account for the receiver.
    __ lea(esp, Operand(esp, ecx, times_pointer_size, 0));
    __ PushReturnAddressFrom(edx);
    __ ret(0);

    __ bind(&failed);
    // Restore target function and new target.
    __ pop(edx);
    __ pop(edi);
    __ pop(eax);
    __ SmiUntag(eax);
  }
  // On failure, tail call back to regular js by re-calling the function
  // which has been reset to the compile lazy builtin.
  static_assert(kJavaScriptCallCodeStartRegister == ecx, "ABI mismatch");
  __ mov(ecx, FieldOperand(edi, JSFunction::kCodeOffset));
  __ add(ecx, Immediate(Code::kHeaderSize - kHeapObjectTag));
  __ jmp(ecx);
}
1361 
1362 namespace {
1363 void Generate_ContinueToBuiltinHelper(MacroAssembler* masm,
1364  bool java_script_builtin,
1365  bool with_result) {
1366  const RegisterConfiguration* config(RegisterConfiguration::Default());
1367  int allocatable_register_count = config->num_allocatable_general_registers();
1368  if (with_result) {
1369  // Overwrite the hole inserted by the deoptimizer with the return value from
1370  // the LAZY deopt point.
1371  __ mov(Operand(esp,
1372  config->num_allocatable_general_registers() * kPointerSize +
1373  BuiltinContinuationFrameConstants::kFixedFrameSize),
1374  eax);
1375  }
1376  for (int i = allocatable_register_count - 1; i >= 0; --i) {
1377  int code = config->GetAllocatableGeneralCode(i);
1378  __ pop(Register::from_code(code));
1379  if (java_script_builtin && code == kJavaScriptCallArgCountRegister.code()) {
1380  __ SmiUntag(Register::from_code(code));
1381  }
1382  }
1383  __ mov(
1384  ebp,
1385  Operand(esp, BuiltinContinuationFrameConstants::kFixedFrameSizeFromFp));
1386  const int offsetToPC =
1387  BuiltinContinuationFrameConstants::kFixedFrameSizeFromFp - kPointerSize;
1388  __ pop(Operand(esp, offsetToPC));
1389  __ Drop(offsetToPC / kPointerSize);
1390  __ add(Operand(esp, 0), Immediate(Code::kHeaderSize - kHeapObjectTag));
1391  __ ret(0);
1392 }
1393 } // namespace
1394 
// Continuation after deopt into a code-stub builtin; no pending result.
// Delegates with java_script_builtin=false, with_result=false.
void Builtins::Generate_ContinueToCodeStubBuiltin(MacroAssembler* masm) {
  Generate_ContinueToBuiltinHelper(masm, false, false);
}
1398 
// Continuation after deopt into a code-stub builtin with a result in eax.
// Delegates with java_script_builtin=false, with_result=true.
void Builtins::Generate_ContinueToCodeStubBuiltinWithResult(
    MacroAssembler* masm) {
  Generate_ContinueToBuiltinHelper(masm, false, true);
}
1403 
// Continuation after deopt into a JavaScript builtin; no pending result.
// Delegates with java_script_builtin=true, with_result=false.
void Builtins::Generate_ContinueToJavaScriptBuiltin(MacroAssembler* masm) {
  Generate_ContinueToBuiltinHelper(masm, true, false);
}
1407 
// Continuation after deopt into a JavaScript builtin with a result in eax.
// Delegates with java_script_builtin=true, with_result=true.
void Builtins::Generate_ContinueToJavaScriptBuiltinWithResult(
    MacroAssembler* masm) {
  Generate_ContinueToBuiltinHelper(masm, true, true);
}
1412 
// Notifies the runtime that deoptimization happened, then returns the value
// left in the stack slot above the return address as the accumulator.
void Builtins::Generate_NotifyDeoptimized(MacroAssembler* masm) {
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ CallRuntime(Runtime::kNotifyDeoptimized);
    // Tear down internal frame.
  }

  // The accumulator must live in eax for the mov below to hand the value
  // back in the interpreter's accumulator register.
  DCHECK_EQ(kInterpreterAccumulatorRegister.code(), eax.code());
  __ mov(eax, Operand(esp, 1 * kPointerSize));
  __ ret(1 * kPointerSize);  // Remove eax.
}
1424 
// static
void Builtins::Generate_FunctionPrototypeApply(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- eax     : argc
  //  -- esp[0]  : return address
  //  -- esp[4]  : argArray
  //  -- esp[8]  : thisArg
  //  -- esp[12] : receiver
  // -----------------------------------

  // 1. Load receiver into xmm0, argArray into edx (if present), remove all
  // arguments from the stack (including the receiver), and push thisArg (if
  // present) instead.
  {
    Label no_arg_array, no_this_arg;
    // Spill receiver to allow the usage of edi as a scratch register.
    __ movd(xmm0, Operand(esp, eax, times_pointer_size, kPointerSize));

    // edi = thisArg, edx = argArray; both default to undefined when the
    // corresponding argument is missing.
    __ LoadRoot(edx, RootIndex::kUndefinedValue);
    __ mov(edi, edx);
    __ test(eax, eax);
    __ j(zero, &no_this_arg, Label::kNear);
    {
      __ mov(edi, Operand(esp, eax, times_pointer_size, 0));
      __ cmp(eax, Immediate(1));
      __ j(equal, &no_arg_array, Label::kNear);
      __ mov(edx, Operand(esp, eax, times_pointer_size, -kPointerSize));
      __ bind(&no_arg_array);
    }
    __ bind(&no_this_arg);
    // Drop all arguments plus the receiver, then push thisArg in its place.
    __ PopReturnAddressTo(ecx);
    __ lea(esp, Operand(esp, eax, times_pointer_size, kPointerSize));
    __ Push(edi);
    __ PushReturnAddressFrom(ecx);

    // Restore receiver to edi.
    __ movd(edi, xmm0);
  }

  // ----------- S t a t e -------------
  //  -- edx    : argArray
  //  -- edi    : receiver
  //  -- esp[0] : return address
  //  -- esp[4] : thisArg
  // -----------------------------------

  // 2. We don't need to check explicitly for callable receiver here,
  // since that's the first thing the Call/CallWithArrayLike builtins
  // will do.

  // 3. Tail call with no arguments if argArray is null or undefined.
  Label no_arguments;
  __ JumpIfRoot(edx, RootIndex::kNullValue, &no_arguments, Label::kNear);
  __ JumpIfRoot(edx, RootIndex::kUndefinedValue, &no_arguments, Label::kNear);

  // 4a. Apply the receiver to the given argArray.
  __ Jump(BUILTIN_CODE(masm->isolate(), CallWithArrayLike),
          RelocInfo::CODE_TARGET);

  // 4b. The argArray is either null or undefined, so we tail call without any
  // arguments to the receiver.
  __ bind(&no_arguments);
  {
    __ Set(eax, 0);
    __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
  }
}
1492 
// static
void Builtins::Generate_FunctionPrototypeCall(MacroAssembler* masm) {
  // Stack Layout (ia32, 4-byte slots):
  //   esp[0]           : Return address
  //   esp[4]           : Argument n
  //   esp[8]           : Argument n-1
  //  ...
  //   esp[4 * n]       : Argument 1
  //   esp[4 * (n + 1)] : Receiver (callable to call)
  //
  // eax contains the number of arguments, n, not counting the receiver.
  //
  // 1. Make sure we have at least one argument.
  {
    Label done;
    __ test(eax, eax);
    __ j(not_zero, &done, Label::kNear);
    // No arguments: push undefined so the shift below has something to drop.
    __ PopReturnAddressTo(edx);
    __ PushRoot(RootIndex::kUndefinedValue);
    __ PushReturnAddressFrom(edx);
    __ inc(eax);
    __ bind(&done);
  }

  // 2. Get the callable to call (passed as receiver) from the stack.
  __ mov(edi, Operand(esp, eax, times_pointer_size, kPointerSize));

  // 3. Shift arguments and return address one slot down on the stack
  // (overwriting the original receiver). Adjust argument count to make
  // the original first argument the new receiver.
  {
    Label loop;
    __ mov(ecx, eax);
    __ bind(&loop);
    __ mov(edx, Operand(esp, ecx, times_pointer_size, 0));
    __ mov(Operand(esp, ecx, times_pointer_size, kPointerSize), edx);
    __ dec(ecx);
    __ j(not_sign, &loop);  // While non-negative (to copy return address).
    __ pop(edx);            // Discard copy of return address.
    __ dec(eax);  // One fewer argument (first argument is new receiver).
  }

  // 4. Call the callable.
  __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
}
1538 
void Builtins::Generate_ReflectApply(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- eax     : argc
  //  -- esp[0]  : return address
  //  -- esp[4]  : argumentsList
  //  -- esp[8]  : thisArgument
  //  -- esp[12] : target
  //  -- esp[16] : receiver
  // -----------------------------------

  // 1. Load target into edi (if present), argumentsList into edx (if present),
  // remove all arguments from the stack (including the receiver), and push
  // thisArgument (if present) instead.
  {
    Label done;
    // edi = target, ecx = thisArgument, edx = argumentsList; all default to
    // undefined when missing.
    __ LoadRoot(edi, RootIndex::kUndefinedValue);
    __ mov(edx, edi);
    __ mov(ecx, edi);
    __ cmp(eax, Immediate(1));
    __ j(below, &done, Label::kNear);  // argc == 0.
    __ mov(edi, Operand(esp, eax, times_pointer_size, -0 * kPointerSize));
    // mov does not modify EFLAGS, so this still tests the cmp above.
    __ j(equal, &done, Label::kNear);  // argc == 1.
    __ mov(ecx, Operand(esp, eax, times_pointer_size, -1 * kPointerSize));
    __ cmp(eax, Immediate(3));
    __ j(below, &done, Label::kNear);  // argc == 2.
    __ mov(edx, Operand(esp, eax, times_pointer_size, -2 * kPointerSize));
    __ bind(&done);

    // Spill argumentsList to use edx as a scratch register.
    __ movd(xmm0, edx);

    // Drop all arguments plus the receiver, then push thisArgument instead.
    __ PopReturnAddressTo(edx);
    __ lea(esp, Operand(esp, eax, times_pointer_size, kPointerSize));
    __ Push(ecx);
    __ PushReturnAddressFrom(edx);

    // Restore argumentsList.
    __ movd(edx, xmm0);
  }

  // ----------- S t a t e -------------
  //  -- edx    : argumentsList
  //  -- edi    : target
  //  -- esp[0] : return address
  //  -- esp[4] : thisArgument
  // -----------------------------------

  // 2. We don't need to check explicitly for callable target here,
  // since that's the first thing the Call/CallWithArrayLike builtins
  // will do.

  // 3. Apply the target to the given argumentsList.
  __ Jump(BUILTIN_CODE(masm->isolate(), CallWithArrayLike),
          RelocInfo::CODE_TARGET);
}
1594 
void Builtins::Generate_ReflectConstruct(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- eax     : argc
  //  -- esp[0]  : return address
  //  -- esp[4]  : new.target (optional)
  //  -- esp[8]  : argumentsList
  //  -- esp[12] : target
  //  -- esp[16] : receiver
  // -----------------------------------

  // 1. Load target into edi (if present), argumentsList into ecx (if present),
  // new.target into edx (if present, otherwise use target), remove all
  // arguments from the stack (including the receiver), and push thisArgument
  // (if present) instead.
  {
    Label done;
    // edi = target, ecx = argumentsList, edx = new.target; all default to
    // undefined when missing, and new.target defaults to target if only the
    // target was supplied.
    __ LoadRoot(edi, RootIndex::kUndefinedValue);
    __ mov(edx, edi);
    __ mov(ecx, edi);
    __ cmp(eax, Immediate(1));
    __ j(below, &done, Label::kNear);  // argc == 0.
    __ mov(edi, Operand(esp, eax, times_pointer_size, -0 * kPointerSize));
    __ mov(edx, edi);
    // mov does not modify EFLAGS, so this still tests the cmp above.
    __ j(equal, &done, Label::kNear);  // argc == 1.
    __ mov(ecx, Operand(esp, eax, times_pointer_size, -1 * kPointerSize));
    __ cmp(eax, Immediate(3));
    __ j(below, &done, Label::kNear);  // argc == 2.
    __ mov(edx, Operand(esp, eax, times_pointer_size, -2 * kPointerSize));
    __ bind(&done);

    // Spill argumentsList to use ecx as a scratch register.
    __ movd(xmm0, ecx);

    // Drop all arguments plus the receiver, then push undefined as the
    // receiver for the construct call.
    __ PopReturnAddressTo(ecx);
    __ lea(esp, Operand(esp, eax, times_pointer_size, kPointerSize));
    __ PushRoot(RootIndex::kUndefinedValue);
    __ PushReturnAddressFrom(ecx);

    // Restore argumentsList.
    __ movd(ecx, xmm0);
  }

  // ----------- S t a t e -------------
  //  -- ecx    : argumentsList
  //  -- edx    : new.target
  //  -- edi    : target
  //  -- esp[0] : return address
  //  -- esp[4] : receiver (undefined)
  // -----------------------------------

  // 2. We don't need to check explicitly for constructor target here,
  // since that's the first thing the Construct/ConstructWithArrayLike
  // builtins will do.

  // 3. We don't need to check explicitly for constructor new.target here,
  // since that's the second thing the Construct/ConstructWithArrayLike
  // builtins will do.

  // 4. Construct the target with the given new.target and argumentsList.
  __ Jump(BUILTIN_CODE(masm->isolate(), ConstructWithArrayLike),
          RelocInfo::CODE_TARGET);
}
1657 
1658 void Builtins::Generate_InternalArrayConstructor(MacroAssembler* masm) {
1659  // ----------- S t a t e -------------
1660  // -- eax : argc
1661  // -- esp[0] : return address
1662  // -- esp[4] : last argument
1663  // -----------------------------------
1664  Label generic_array_code;
1665 
1666  if (FLAG_debug_code) {
1667  // Initial map for the builtin InternalArray function should be a map.
1668  __ mov(ecx, FieldOperand(edi, JSFunction::kPrototypeOrInitialMapOffset));
1669  // Will both indicate a nullptr and a Smi.
1670  __ test(ecx, Immediate(kSmiTagMask));
1671  __ Assert(not_zero,
1672  AbortReason::kUnexpectedInitialMapForInternalArrayFunction);
1673  __ CmpObjectType(ecx, MAP_TYPE, ecx);
1674  __ Assert(equal,
1675  AbortReason::kUnexpectedInitialMapForInternalArrayFunction);
1676  }
1677 
1678  // Run the native code for the InternalArray function called as a normal
1679  // function.
1680  __ Jump(BUILTIN_CODE(masm->isolate(), InternalArrayConstructorImpl),
1681  RelocInfo::CODE_TARGET);
1682 }
1683 
// Builds an arguments adaptor frame on top of the current stack: saved ebp,
// the ARGUMENTS_ADAPTOR frame-type marker, the function, the Smi-tagged
// actual argument count, and one padding slot.
static void EnterArgumentsAdaptorFrame(MacroAssembler* masm) {
  __ push(ebp);
  __ mov(ebp, esp);

  // Store the arguments adaptor context sentinel.
  __ push(Immediate(StackFrame::TypeToMarker(StackFrame::ARGUMENTS_ADAPTOR)));

  // Push the function on the stack.
  __ push(edi);

  // Preserve the number of arguments on the stack. Must preserve eax,
  // ebx and ecx because these registers are used when copying the
  // arguments and the receiver.
  STATIC_ASSERT(kSmiTagSize == 1);
  // lea computes eax * 2 + kSmiTag, i.e. a Smi-tagged copy of the argument
  // count, without clobbering eax or the flags.
  __ lea(edi, Operand(eax, eax, times_1, kSmiTag));
  __ push(edi);

  __ Push(Immediate(0));  // Padding.
}
1703 
// Tears down the arguments adaptor frame built by EnterArgumentsAdaptorFrame
// and pops the caller's actual arguments (plus the receiver) off the stack.
static void LeaveArgumentsAdaptorFrame(MacroAssembler* masm) {
  // Retrieve the (Smi-tagged) number of arguments from the stack.
  __ mov(edi, Operand(ebp, ArgumentsAdaptorFrameConstants::kLengthOffset));

  // Leave the frame.
  __ leave();

  // Remove caller arguments from the stack. edi holds a Smi (count << 1),
  // so times_2 scaling yields count * kPointerSize on ia32.
  STATIC_ASSERT(kSmiTagSize == 1 && kSmiTag == 0);
  __ PopReturnAddressTo(ecx);
  __ lea(esp, Operand(esp, edi, times_2, 1 * kPointerSize));  // 1 ~ receiver
  __ PushReturnAddressFrom(ecx);
}
1717 
// static
// Pushes the elements of a FixedArray arguments list onto the stack (holes
// become undefined) and tail-calls the given Call or Construct builtin with
// the combined argument count.
void Builtins::Generate_CallOrConstructVarargs(MacroAssembler* masm,
                                               Handle<Code> code) {
  // ----------- S t a t e -------------
  //  -- edi    : target
  //  -- esi    : context for the Call / Construct builtin
  //  -- eax    : number of parameters on the stack (not including the receiver)
  //  -- ecx    : len (number of elements to copy from the arguments list)
  //  -- edx    : new.target (checked to be constructor or undefined)
  //  -- esp[4] : arguments list (a FixedArray)
  //  -- esp[0] : return address.
  // -----------------------------------

  // Spill the registers that must survive pushing the arguments:
  // new.target (edx), target (edi), argument count (eax) and context (esi).
  __ movd(xmm0, edx);
  __ movd(xmm1, edi);
  __ movd(xmm2, eax);
  __ movd(xmm3, esi);  // Spill the context.

  const Register kArgumentsList = esi;
  const Register kArgumentsLength = ecx;

  // Pop the FixedArray arguments list off the stack (it sits just below the
  // return address).
  __ PopReturnAddressTo(edx);
  __ pop(kArgumentsList);
  __ PushReturnAddressFrom(edx);

  if (masm->emit_debug_code()) {
    // Allow kArgumentsList to be a FixedArray, or a FixedDoubleArray if
    // kArgumentsLength == 0.
    Label ok, fail;
    __ AssertNotSmi(kArgumentsList);
    __ mov(edx, FieldOperand(kArgumentsList, HeapObject::kMapOffset));
    __ CmpInstanceType(edx, FIXED_ARRAY_TYPE);
    __ j(equal, &ok);
    __ CmpInstanceType(edx, FIXED_DOUBLE_ARRAY_TYPE);
    __ j(not_equal, &fail);
    __ cmp(kArgumentsLength, 0);
    __ j(equal, &ok);
    // Fall through.
    __ bind(&fail);
    __ Abort(AbortReason::kOperandIsNotAFixedArray);

    __ bind(&ok);
  }

  // Check the stack for overflow. We are not trying to catch interruptions
  // (i.e. debug break and preemption) here, so check the "real stack limit".
  Label stack_overflow;
  Generate_StackOverflowCheck(masm, kArgumentsLength, edx, &stack_overflow);

  // Push additional arguments onto the stack.
  {
    __ PopReturnAddressTo(edx);
    __ Move(eax, Immediate(0));  // eax is the loop index.
    Label done, push, loop;
    __ bind(&loop);
    __ cmp(eax, kArgumentsLength);
    __ j(equal, &done, Label::kNear);
    // Turn the hole into undefined as we go.
    __ mov(edi, FieldOperand(kArgumentsList, eax, times_pointer_size,
                             FixedArray::kHeaderSize));
    __ CompareRoot(edi, RootIndex::kTheHoleValue);
    __ j(not_equal, &push, Label::kNear);
    __ LoadRoot(edi, RootIndex::kUndefinedValue);
    __ bind(&push);
    __ Push(edi);
    __ inc(eax);
    __ jmp(&loop);
    __ bind(&done);
    __ PushReturnAddressFrom(edx);
  }

  // Restore esi, eax, edi and edx.
  __ movd(esi, xmm3);  // Restore the context.
  __ movd(eax, xmm2);
  __ movd(edi, xmm1);
  __ movd(edx, xmm0);

  // Compute the actual parameter count.
  __ add(eax, kArgumentsLength);

  // Tail-call to the actual Call or Construct builtin.
  __ Jump(code, RelocInfo::CODE_TARGET);

  __ bind(&stack_overflow);
  __ movd(esi, xmm3);  // Restore the context.
  __ TailCallRuntime(Runtime::kThrowStackOverflow);
}
1806 
// static
// Forwards the caller frame's arguments starting at index ecx (rest
// parameters) onto the stack, then tail-calls the given Call or Construct
// builtin. Handles both regular JavaScript frames and arguments adaptor
// frames when determining the caller's argument count.
void Builtins::Generate_CallOrConstructForwardVarargs(MacroAssembler* masm,
                                                      CallOrConstructMode mode,
                                                      Handle<Code> code) {
  // ----------- S t a t e -------------
  //  -- eax : the number of arguments (not including the receiver)
  //  -- edi : the target to call (can be any Object)
  //  -- esi : context for the Call / Construct builtin
  //  -- edx : the new target (for [[Construct]] calls)
  //  -- ecx : start index (to support rest parameters)
  // -----------------------------------

  __ movd(xmm0, esi);  // Spill the context.

  Register scratch = esi;

  // Check if new.target has a [[Construct]] internal method.
  if (mode == CallOrConstructMode::kConstruct) {
    Label new_target_constructor, new_target_not_constructor;
    __ JumpIfSmi(edx, &new_target_not_constructor, Label::kNear);
    __ mov(scratch, FieldOperand(edx, HeapObject::kMapOffset));
    __ test_b(FieldOperand(scratch, Map::kBitFieldOffset),
              Immediate(Map::IsConstructorBit::kMask));
    __ j(not_zero, &new_target_constructor, Label::kNear);
    __ bind(&new_target_not_constructor);
    {
      // Throw; this does not return.
      FrameScope scope(masm, StackFrame::MANUAL);
      __ EnterFrame(StackFrame::INTERNAL);
      __ Push(edx);
      __ movd(esi, xmm0);  // Restore the context.
      __ CallRuntime(Runtime::kThrowNotConstructor);
    }
    __ bind(&new_target_constructor);
  }

  __ movd(xmm1, edx);  // Preserve new.target (in case of [[Construct]]).

  // Check if we have an arguments adaptor frame below the function frame.
  Label arguments_adaptor, arguments_done;
  __ mov(scratch, Operand(ebp, StandardFrameConstants::kCallerFPOffset));
  __ cmp(Operand(scratch, CommonFrameConstants::kContextOrFrameTypeOffset),
         Immediate(StackFrame::TypeToMarker(StackFrame::ARGUMENTS_ADAPTOR)));
  __ j(equal, &arguments_adaptor, Label::kNear);
  {
    // No adaptor frame: use the formal parameter count of the caller's
    // function, and copy from the current frame (scratch = ebp).
    __ mov(edx, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
    __ mov(edx, FieldOperand(edx, JSFunction::kSharedFunctionInfoOffset));
    __ movzx_w(edx, FieldOperand(
                        edx, SharedFunctionInfo::kFormalParameterCountOffset));
    __ mov(scratch, ebp);
  }
  __ jmp(&arguments_done, Label::kNear);
  __ bind(&arguments_adaptor);
  {
    // Just load the length from the ArgumentsAdaptorFrame.
    __ mov(edx,
           Operand(scratch, ArgumentsAdaptorFrameConstants::kLengthOffset));
    __ SmiUntag(edx);
  }
  __ bind(&arguments_done);

  Label stack_done, stack_overflow;
  // edx = number of arguments to forward (length - start index).
  __ sub(edx, ecx);
  __ j(less_equal, &stack_done);
  {
    Generate_StackOverflowCheck(masm, edx, ecx, &stack_overflow);

    // Forward the arguments from the caller frame.
    {
      Label loop;
      __ add(eax, edx);  // New total argument count.
      __ PopReturnAddressTo(ecx);
      __ bind(&loop);
      {
        // Copy from highest slot down; edx counts down to zero.
        __ Push(Operand(scratch, edx, times_pointer_size, 1 * kPointerSize));
        __ dec(edx);
        __ j(not_zero, &loop);
      }
      __ PushReturnAddressFrom(ecx);
    }
  }
  __ bind(&stack_done);

  __ movd(edx, xmm1);  // Restore new.target (in case of [[Construct]]).
  __ movd(esi, xmm0);  // Restore the context.

  // Tail-call to the {code} handler.
  __ Jump(code, RelocInfo::CODE_TARGET);

  __ bind(&stack_overflow);
  __ movd(esi, xmm0);  // Restore the context.
  __ TailCallRuntime(Runtime::kThrowStackOverflow);
}
1899 
// static
// Implements [[Call]] for a JSFunction (ES6 section 9.2.1): rejects class
// constructors, converts the receiver for sloppy-mode non-native functions
// as required by {mode}, then invokes the function's code.
void Builtins::Generate_CallFunction(MacroAssembler* masm,
                                     ConvertReceiverMode mode) {
  // ----------- S t a t e -------------
  //  -- eax : the number of arguments (not including the receiver)
  //  -- edi : the function to call (checked to be a JSFunction)
  // -----------------------------------
  __ AssertFunction(edi);

  // See ES6 section 9.2.1 [[Call]] ( thisArgument, argumentsList)
  // Check that the function is not a "classConstructor".
  Label class_constructor;
  __ mov(edx, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
  __ test(FieldOperand(edx, SharedFunctionInfo::kFlagsOffset),
          Immediate(SharedFunctionInfo::IsClassConstructorBit::kMask));
  __ j(not_zero, &class_constructor);

  // Enter the context of the function; ToObject has to run in the function
  // context, and we also need to take the global proxy from the function
  // context in case of conversion.
  __ mov(esi, FieldOperand(edi, JSFunction::kContextOffset));
  // We need to convert the receiver for non-native sloppy mode functions.
  Label done_convert;
  __ test(FieldOperand(edx, SharedFunctionInfo::kFlagsOffset),
          Immediate(SharedFunctionInfo::IsNativeBit::kMask |
                    SharedFunctionInfo::IsStrictBit::kMask));
  __ j(not_zero, &done_convert);
  {
    // ----------- S t a t e -------------
    //  -- eax : the number of arguments (not including the receiver)
    //  -- edx : the shared function info.
    //  -- edi : the function to call (checked to be a JSFunction)
    //  -- esi : the function context.
    // -----------------------------------

    if (mode == ConvertReceiverMode::kNullOrUndefined) {
      // Patch receiver to global proxy.
      __ LoadGlobalProxy(ecx);
    } else {
      Label convert_to_object, convert_receiver;
      // Load the receiver from the stack.
      __ mov(ecx, Operand(esp, eax, times_pointer_size, kPointerSize));
      __ JumpIfSmi(ecx, &convert_to_object, Label::kNear);
      STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
      __ CmpObjectType(ecx, FIRST_JS_RECEIVER_TYPE, ecx);  // Clobbers ecx.
      // JSReceivers need no conversion.
      __ j(above_equal, &done_convert);
      // Reload the receiver (it was clobbered by CmpObjectType).
      __ mov(ecx, Operand(esp, eax, times_pointer_size, kPointerSize));
      if (mode != ConvertReceiverMode::kNotNullOrUndefined) {
        Label convert_global_proxy;
        __ JumpIfRoot(ecx, RootIndex::kUndefinedValue, &convert_global_proxy,
                      Label::kNear);
        __ JumpIfNotRoot(ecx, RootIndex::kNullValue, &convert_to_object,
                         Label::kNear);
        __ bind(&convert_global_proxy);
        {
          // Patch receiver to global proxy.
          __ LoadGlobalProxy(ecx);
        }
        __ jmp(&convert_receiver);
      }
      __ bind(&convert_to_object);
      {
        // Convert receiver using ToObject.
        // TODO(bmeurer): Inline the allocation here to avoid building the frame
        // in the fast case? (fall back to AllocateInNewSpace?)
        FrameScope scope(masm, StackFrame::INTERNAL);
        // Preserve the argument count (as a Smi) and the function across
        // the ToObject call; pass the receiver in eax.
        __ SmiTag(eax);
        __ Push(eax);
        __ Push(edi);
        __ mov(eax, ecx);
        __ Push(esi);
        __ Call(BUILTIN_CODE(masm->isolate(), ToObject),
                RelocInfo::CODE_TARGET);
        __ Pop(esi);
        __ mov(ecx, eax);
        __ Pop(edi);
        __ Pop(eax);
        __ SmiUntag(eax);
      }
      // Reload the shared function info (edx was clobbered above).
      __ mov(edx, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
      __ bind(&convert_receiver);
    }
    // Store the converted receiver back on the stack.
    __ mov(Operand(esp, eax, times_pointer_size, kPointerSize), ecx);
  }
  __ bind(&done_convert);

  // ----------- S t a t e -------------
  //  -- eax : the number of arguments (not including the receiver)
  //  -- edx : the shared function info.
  //  -- edi : the function to call (checked to be a JSFunction)
  //  -- esi : the function context.
  // -----------------------------------

  __ movzx_w(
      ecx, FieldOperand(edx, SharedFunctionInfo::kFormalParameterCountOffset));
  ParameterCount actual(eax);
  ParameterCount expected(ecx);
  __ InvokeFunctionCode(edi, no_reg, expected, actual, JUMP_FUNCTION);
  // The function is a "classConstructor", need to raise an exception.
  __ bind(&class_constructor);
  {
    FrameScope frame(masm, StackFrame::INTERNAL);
    __ push(edi);
    __ CallRuntime(Runtime::kThrowConstructorNonCallableError);
  }
}
2006 
2007 namespace {
2008 
// Inserts the [[BoundArguments]] of the JSBoundFunction in edi between the
// existing stack arguments and the receiver: reserves space, slides the
// current arguments (and return address) down, then copies the bound
// arguments into the gap. Updates eax to the new argument count.
void Generate_PushBoundArguments(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- eax : the number of arguments (not including the receiver)
  //  -- edx : new.target (only in case of [[Construct]])
  //  -- edi : target (checked to be a JSBoundFunction)
  // -----------------------------------

  __ movd(xmm0, edx);  // Spill edx.

  // Load [[BoundArguments]] into ecx and length of that into edx.
  Label no_bound_arguments;
  __ mov(ecx, FieldOperand(edi, JSBoundFunction::kBoundArgumentsOffset));
  __ mov(edx, FieldOperand(ecx, FixedArray::kLengthOffset));
  __ SmiUntag(edx);
  __ test(edx, edx);
  __ j(zero, &no_bound_arguments);
  {
    // ----------- S t a t e -------------
    //  -- eax  : the number of arguments (not including the receiver)
    //  -- xmm0 : new.target (only in case of [[Construct]])
    //  -- edi  : target (checked to be a JSBoundFunction)
    //  -- ecx  : the [[BoundArguments]] (implemented as FixedArray)
    //  -- edx  : the number of [[BoundArguments]]
    // -----------------------------------

    // Reserve stack space for the [[BoundArguments]].
    {
      Label done;
      __ lea(ecx, Operand(edx, times_pointer_size, 0));
      __ sub(esp, ecx);
      // Check the stack for overflow. We are not trying to catch interruptions
      // (i.e. debug break and preemption) here, so check the "real stack
      // limit".
      __ CompareRealStackLimit(esp);
      __ j(above_equal, &done, Label::kNear);
      // Restore the stack pointer.
      __ lea(esp, Operand(esp, edx, times_pointer_size, 0));
      {
        // Throw; this does not return.
        FrameScope scope(masm, StackFrame::MANUAL);
        __ EnterFrame(StackFrame::INTERNAL);
        __ CallRuntime(Runtime::kThrowStackOverflow);
      }
      __ bind(&done);
    }

    // Adjust effective number of arguments to include return address.
    __ inc(eax);

    // Relocate arguments and return address down the stack, from their old
    // position (esp + edx slots) to the newly reserved space at esp.
    {
      Label loop;
      __ Set(ecx, 0);
      __ lea(edx, Operand(esp, edx, times_pointer_size, 0));
      __ bind(&loop);
      // xmm1 is used as a scratch for the copy (all GP registers are live).
      __ movd(xmm1, Operand(edx, ecx, times_pointer_size, 0));
      __ movd(Operand(esp, ecx, times_pointer_size, 0), xmm1);
      __ inc(ecx);
      __ cmp(ecx, eax);
      __ j(less, &loop);
    }

    // Copy [[BoundArguments]] to the stack (below the arguments).
    {
      Label loop;
      // Reload the array and its length (both were clobbered above).
      __ mov(ecx, FieldOperand(edi, JSBoundFunction::kBoundArgumentsOffset));
      __ mov(edx, FieldOperand(ecx, FixedArray::kLengthOffset));
      __ SmiUntag(edx);
      __ bind(&loop);
      // NOTE: the j(greater) below consumes the flags set by this dec;
      // movd and lea do not modify EFLAGS.
      __ dec(edx);
      __ movd(xmm1, FieldOperand(ecx, edx, times_pointer_size,
                                 FixedArray::kHeaderSize));
      __ movd(Operand(esp, eax, times_pointer_size, 0), xmm1);
      __ lea(eax, Operand(eax, 1));
      __ j(greater, &loop);
    }

    // Adjust effective number of arguments (eax contains the number of
    // arguments from the call plus return address plus the number of
    // [[BoundArguments]]), so we need to subtract one for the return address.
    __ dec(eax);
  }

  __ bind(&no_bound_arguments);
  __ movd(edx, xmm0);  // Reload edx.
}
2094 
2095 } // namespace
2096 
// static
// Implements [[Call]] for a JSBoundFunction: installs [[BoundThis]] as the
// receiver, pushes the [[BoundArguments]], then calls the
// [[BoundTargetFunction]] via the generic Call builtin.
void Builtins::Generate_CallBoundFunctionImpl(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- eax : the number of arguments (not including the receiver)
  //  -- edi : the function to call (checked to be a JSBoundFunction)
  // -----------------------------------
  __ AssertBoundFunction(edi);

  // Patch the receiver to [[BoundThis]].
  __ mov(ecx, FieldOperand(edi, JSBoundFunction::kBoundThisOffset));
  __ mov(Operand(esp, eax, times_pointer_size, kPointerSize), ecx);

  // Push the [[BoundArguments]] onto the stack.
  Generate_PushBoundArguments(masm);

  // Call the [[BoundTargetFunction]] via the Call builtin.
  __ mov(edi, FieldOperand(edi, JSBoundFunction::kBoundTargetFunctionOffset));
  __ Jump(BUILTIN_CODE(masm->isolate(), Call_ReceiverIsAny),
          RelocInfo::CODE_TARGET);
}
2117 
// static
// Generic [[Call]] dispatch: routes the target in edi to the appropriate
// builtin based on its type (JSFunction, JSBoundFunction, JSProxy, other
// callable object), or throws if the target is not callable.
void Builtins::Generate_Call(MacroAssembler* masm, ConvertReceiverMode mode) {
  // ----------- S t a t e -------------
  //  -- eax : the number of arguments (not including the receiver)
  //  -- edi : the target to call (can be any Object).
  // -----------------------------------

  Label non_callable, non_function, non_smi, non_jsfunction,
      non_jsboundfunction;
  // Smis are never callable.
  __ JumpIfSmi(edi, &non_callable);
  __ bind(&non_smi);
  __ CmpObjectType(edi, JS_FUNCTION_TYPE, ecx);
  __ j(not_equal, &non_jsfunction);
  __ Jump(masm->isolate()->builtins()->CallFunction(mode),
          RelocInfo::CODE_TARGET);

  __ bind(&non_jsfunction);
  // Note: ecx holds the map of the target (loaded by CmpObjectType above).
  __ CmpInstanceType(ecx, JS_BOUND_FUNCTION_TYPE);
  __ j(not_equal, &non_jsboundfunction);
  __ Jump(BUILTIN_CODE(masm->isolate(), CallBoundFunction),
          RelocInfo::CODE_TARGET);

  // Check if target is a proxy and call CallProxy external builtin
  __ bind(&non_jsboundfunction);
  __ test_b(FieldOperand(ecx, Map::kBitFieldOffset),
            Immediate(Map::IsCallableBit::kMask));
  __ j(zero, &non_callable);

  // Call CallProxy external builtin
  __ CmpInstanceType(ecx, JS_PROXY_TYPE);
  __ j(not_equal, &non_function);
  __ Jump(BUILTIN_CODE(masm->isolate(), CallProxy), RelocInfo::CODE_TARGET);

  // 2. Call to something else, which might have a [[Call]] internal method (if
  // not we raise an exception).
  __ bind(&non_function);
  // Overwrite the original receiver with the (original) target.
  __ mov(Operand(esp, eax, times_pointer_size, kPointerSize), edi);
  // Let the "call_as_function_delegate" take care of the rest.
  __ LoadGlobalFunction(Context::CALL_AS_FUNCTION_DELEGATE_INDEX, edi);
  __ Jump(masm->isolate()->builtins()->CallFunction(
              ConvertReceiverMode::kNotNullOrUndefined),
          RelocInfo::CODE_TARGET);

  // 3. Call to something that is not callable.
  __ bind(&non_callable);
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ Push(edi);
    __ CallRuntime(Runtime::kThrowCalledNonCallable);
  }
}
2170 
// static
// Implements [[Construct]] for a JSFunction: dispatches to the builtins
// construct stub or the generic construct stub based on the shared function
// info's ConstructAsBuiltin bit.
void Builtins::Generate_ConstructFunction(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- eax : the number of arguments (not including the receiver)
  //  -- edx : the new target (checked to be a constructor)
  //  -- edi : the constructor to call (checked to be a JSFunction)
  // -----------------------------------
  __ AssertConstructor(edi);
  __ AssertFunction(edi);

  Label call_generic_stub;

  // Jump to JSBuiltinsConstructStub or JSConstructStubGeneric.
  __ mov(ecx, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
  __ test(FieldOperand(ecx, SharedFunctionInfo::kFlagsOffset),
          Immediate(SharedFunctionInfo::ConstructAsBuiltinBit::kMask));
  __ j(zero, &call_generic_stub, Label::kNear);

  // Calling convention for function specific ConstructStubs require
  // ecx to contain either an AllocationSite or undefined.
  __ LoadRoot(ecx, RootIndex::kUndefinedValue);
  __ Jump(BUILTIN_CODE(masm->isolate(), JSBuiltinsConstructStub),
          RelocInfo::CODE_TARGET);

  __ bind(&call_generic_stub);
  // Calling convention for function specific ConstructStubs require
  // ecx to contain either an AllocationSite or undefined.
  __ LoadRoot(ecx, RootIndex::kUndefinedValue);
  __ Jump(BUILTIN_CODE(masm->isolate(), JSConstructStubGeneric),
          RelocInfo::CODE_TARGET);
}
2202 
// static
// Implements [[Construct]] for a JSBoundFunction: pushes the bound
// arguments, fixes up new.target if it was the bound function itself, and
// constructs the [[BoundTargetFunction]] via the generic Construct builtin.
void Builtins::Generate_ConstructBoundFunction(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- eax : the number of arguments (not including the receiver)
  //  -- edx : the new target (checked to be a constructor)
  //  -- edi : the constructor to call (checked to be a JSBoundFunction)
  // -----------------------------------
  __ AssertConstructor(edi);
  __ AssertBoundFunction(edi);

  // Push the [[BoundArguments]] onto the stack.
  Generate_PushBoundArguments(masm);

  // Patch new.target to [[BoundTargetFunction]] if new.target equals target.
  {
    Label done;
    __ cmp(edi, edx);
    __ j(not_equal, &done, Label::kNear);
    __ mov(edx, FieldOperand(edi, JSBoundFunction::kBoundTargetFunctionOffset));
    __ bind(&done);
  }

  // Construct the [[BoundTargetFunction]] via the Construct builtin.
  __ mov(edi, FieldOperand(edi, JSBoundFunction::kBoundTargetFunctionOffset));
  __ Jump(BUILTIN_CODE(masm->isolate(), Construct), RelocInfo::CODE_TARGET);
}
2229 
// static
// Generic [[Construct]] dispatch: routes the target in edi to the
// appropriate builtin based on its type (JSFunction, JSBoundFunction,
// JSProxy, other constructor), or throws if it is not a constructor.
void Builtins::Generate_Construct(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- eax : the number of arguments (not including the receiver)
  //  -- edx : the new target (either the same as the constructor or
  //           the JSFunction on which new was invoked initially)
  //  -- edi : the constructor to call (can be any Object)
  // -----------------------------------

  // Check if target is a Smi.
  Label non_constructor, non_proxy, non_jsfunction, non_jsboundfunction;
  __ JumpIfSmi(edi, &non_constructor);

  // Check if target has a [[Construct]] internal method.
  __ mov(ecx, FieldOperand(edi, HeapObject::kMapOffset));
  __ test_b(FieldOperand(ecx, Map::kBitFieldOffset),
            Immediate(Map::IsConstructorBit::kMask));
  __ j(zero, &non_constructor);

  // Dispatch based on instance type (ecx still holds the target's map).
  __ CmpInstanceType(ecx, JS_FUNCTION_TYPE);
  __ j(not_equal, &non_jsfunction);
  __ Jump(BUILTIN_CODE(masm->isolate(), ConstructFunction),
          RelocInfo::CODE_TARGET);

  // Only dispatch to bound functions after checking whether they are
  // constructors.
  __ bind(&non_jsfunction);
  __ CmpInstanceType(ecx, JS_BOUND_FUNCTION_TYPE);
  __ j(not_equal, &non_jsboundfunction);
  __ Jump(BUILTIN_CODE(masm->isolate(), ConstructBoundFunction),
          RelocInfo::CODE_TARGET);

  // Only dispatch to proxies after checking whether they are constructors.
  __ bind(&non_jsboundfunction);
  __ CmpInstanceType(ecx, JS_PROXY_TYPE);
  __ j(not_equal, &non_proxy);
  __ Jump(BUILTIN_CODE(masm->isolate(), ConstructProxy),
          RelocInfo::CODE_TARGET);

  // Called Construct on an exotic Object with a [[Construct]] internal method.
  __ bind(&non_proxy);
  {
    // Overwrite the original receiver with the (original) target.
    __ mov(Operand(esp, eax, times_pointer_size, kPointerSize), edi);
    // Let the "call_as_constructor_delegate" take care of the rest.
    __ LoadGlobalFunction(Context::CALL_AS_CONSTRUCTOR_DELEGATE_INDEX, edi);
    __ Jump(masm->isolate()->builtins()->CallFunction(),
            RelocInfo::CODE_TARGET);
  }

  // Called Construct on an Object that doesn't have a [[Construct]] internal
  // method.
  __ bind(&non_constructor);
  __ Jump(BUILTIN_CODE(masm->isolate(), ConstructedNonConstructable),
          RelocInfo::CODE_TARGET);
}
2287 
// Adapts the actual argument count to the callee's expected count: copies
// existing arguments into a new adaptor frame, dropping extras or filling
// missing slots with undefined, then calls the function's code. Functions
// marked with the don't-adapt sentinel are called directly.
void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- eax : actual number of arguments
  //  -- ecx : expected number of arguments
  //  -- edx : new target (passed through to callee)
  //  -- edi : function (passed through to callee)
  // -----------------------------------

  const Register kExpectedNumberOfArgumentsRegister = ecx;

  Label invoke, dont_adapt_arguments, stack_overflow, enough, too_few;
  __ cmp(kExpectedNumberOfArgumentsRegister,
         SharedFunctionInfo::kDontAdaptArgumentsSentinel);
  __ j(equal, &dont_adapt_arguments);
  __ cmp(eax, kExpectedNumberOfArgumentsRegister);
  __ j(less, &too_few);

  {  // Enough parameters: Actual >= expected.
    __ bind(&enough);
    EnterArgumentsAdaptorFrame(masm);
    // edi is used as a scratch register. It should be restored from the frame
    // when needed.
    Generate_StackOverflowCheck(masm, kExpectedNumberOfArgumentsRegister, edi,
                                &stack_overflow);

    // Copy receiver and all expected arguments.
    const int offset = StandardFrameConstants::kCallerSPOffset;
    // edi points at the highest caller argument (the receiver).
    __ lea(edi, Operand(ebp, eax, times_4, offset));
    __ mov(eax, -1);  // account for receiver

    Label copy;
    __ bind(&copy);
    __ inc(eax);
    __ push(Operand(edi, 0));
    __ sub(edi, Immediate(kPointerSize));
    __ cmp(eax, kExpectedNumberOfArgumentsRegister);
    __ j(less, &copy);
    // eax now contains the expected number of arguments.
    __ jmp(&invoke);
  }

  {  // Too few parameters: Actual < expected.
    __ bind(&too_few);
    EnterArgumentsAdaptorFrame(masm);
    // edi is used as a scratch register. It should be restored from the frame
    // when needed.
    Generate_StackOverflowCheck(masm, kExpectedNumberOfArgumentsRegister, edi,
                                &stack_overflow);

    // Remember expected arguments in xmm0.
    __ movd(xmm0, kExpectedNumberOfArgumentsRegister);

    // Copy receiver and all actual arguments.
    const int offset = StandardFrameConstants::kCallerSPOffset;
    __ lea(edi, Operand(ebp, eax, times_4, offset));
    // ecx = expected - actual.
    __ sub(kExpectedNumberOfArgumentsRegister, eax);
    // eax = -actual - 1
    __ neg(eax);
    __ sub(eax, Immediate(1));

    // Count eax up to zero, pushing actual + 1 values (receiver included).
    Label copy;
    __ bind(&copy);
    __ inc(eax);
    __ push(Operand(edi, 0));
    __ sub(edi, Immediate(kPointerSize));
    __ test(eax, eax);
    __ j(not_zero, &copy);

    // Fill remaining expected arguments with undefined values. Note that
    // ecx now holds (expected - actual), the number of slots to fill.
    Label fill;
    __ bind(&fill);
    __ inc(eax);
    __ Push(Immediate(masm->isolate()->factory()->undefined_value()));
    __ cmp(eax, kExpectedNumberOfArgumentsRegister);
    __ j(less, &fill);

    // Restore expected arguments.
    __ movd(eax, xmm0);
  }

  // Call the entry point.
  __ bind(&invoke);
  // Restore function pointer.
  __ mov(edi, Operand(ebp, ArgumentsAdaptorFrameConstants::kFunctionOffset));
  // eax : expected number of arguments
  // edx : new target (passed through to callee)
  // edi : function (passed through to callee)
  static_assert(kJavaScriptCallCodeStartRegister == ecx, "ABI mismatch");
  __ mov(ecx, FieldOperand(edi, JSFunction::kCodeOffset));
  __ add(ecx, Immediate(Code::kHeaderSize - kHeapObjectTag));
  __ call(ecx);

  // Store offset of return address for deoptimizer.
  masm->isolate()->heap()->SetArgumentsAdaptorDeoptPCOffset(masm->pc_offset());

  // Leave frame and return.
  LeaveArgumentsAdaptorFrame(masm);
  __ ret(0);

  // -------------------------------------------
  // Dont adapt arguments.
  // -------------------------------------------
  __ bind(&dont_adapt_arguments);
  static_assert(kJavaScriptCallCodeStartRegister == ecx, "ABI mismatch");
  __ mov(ecx, FieldOperand(edi, JSFunction::kCodeOffset));
  __ add(ecx, Immediate(Code::kHeaderSize - kHeapObjectTag));
  __ jmp(ecx);

  __ bind(&stack_overflow);
  {
    FrameScope frame(masm, StackFrame::MANUAL);
    __ CallRuntime(Runtime::kThrowStackOverflow);
    __ int3();
  }
}
2404 
// On-stack replacement from interpreted code: compiles an optimized version
// of the current function and, if successful, redirects the return address
// to the OSR entry point inside the new code object.
void Builtins::Generate_InterpreterOnStackReplacement(MacroAssembler* masm) {
  // Lookup the function in the JavaScript frame.
  __ mov(eax, Operand(ebp, StandardFrameConstants::kCallerFPOffset));
  __ mov(eax, Operand(eax, JavaScriptFrameConstants::kFunctionOffset));

  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    // Pass function as argument.
    __ push(eax);
    __ CallRuntime(Runtime::kCompileForOnStackReplacement);
  }

  Label skip;
  // If the code object is null, just return to the caller.
  __ cmp(eax, Immediate(0));
  __ j(not_equal, &skip, Label::kNear);
  __ ret(0);

  __ bind(&skip);

  // Drop the handler frame that is be sitting on top of the actual
  // JavaScript frame. This is the case then OSR is triggered from bytecode.
  __ leave();

  // Load deoptimization data from the code object.
  __ mov(ecx, Operand(eax, Code::kDeoptimizationDataOffset - kHeapObjectTag));

  // Load the OSR entrypoint offset from the deoptimization data.
  __ mov(ecx, Operand(ecx, FixedArray::OffsetOfElementAt(
                               DeoptimizationData::kOsrPcOffsetIndex) -
                               kHeapObjectTag));
  __ SmiUntag(ecx);

  // Compute the target address = code_obj + header_size + osr_offset
  __ lea(eax, Operand(eax, ecx, times_1, Code::kHeaderSize - kHeapObjectTag));

  // Overwrite the return address on the stack.
  __ mov(Operand(esp, 0), eax);

  // And "return" to the OSR entry point of the function.
  __ ret(0);
}
2447 
// Lazy compilation trampoline for wasm: saves all wasm parameter registers,
// calls the WasmCompileLazy runtime function to compile the target function,
// restores the registers, and jumps to the returned entrypoint.
void Builtins::Generate_WasmCompileLazy(MacroAssembler* masm) {
  // The function index was put in edi by the jump table trampoline.
  // Convert to Smi for the runtime call.
  __ SmiTag(kWasmCompileLazyFuncIndexRegister);
  {
    HardAbortScope hard_abort(masm);  // Avoid calls to Abort.
    FrameScope scope(masm, StackFrame::WASM_COMPILE_LAZY);

    // Save all parameter registers (see wasm-linkage.cc). They might be
    // overwritten in the runtime call below. We don't have any callee-saved
    // registers in wasm, so no need to store anything else.
    static_assert(WasmCompileLazyFrameConstants::kNumberOfSavedGpParamRegs ==
                      arraysize(wasm::kGpParamRegisters),
                  "frame size mismatch");
    for (Register reg : wasm::kGpParamRegisters) {
      __ Push(reg);
    }
    static_assert(WasmCompileLazyFrameConstants::kNumberOfSavedFpParamRegs ==
                      arraysize(wasm::kFpParamRegisters),
                  "frame size mismatch");
    // FP registers are saved manually below the GP registers.
    __ sub(esp, Immediate(kSimd128Size * arraysize(wasm::kFpParamRegisters)));
    int offset = 0;
    for (DoubleRegister reg : wasm::kFpParamRegisters) {
      __ movdqu(Operand(esp, offset), reg);
      offset += kSimd128Size;
    }

    // Push the WASM instance as an explicit argument to WasmCompileLazy.
    __ Push(kWasmInstanceRegister);
    // Push the function index as second argument.
    __ Push(kWasmCompileLazyFuncIndexRegister);
    // Load the correct CEntry builtin from the instance object.
    __ mov(ecx, FieldOperand(kWasmInstanceRegister,
                             WasmInstanceObject::kCEntryStubOffset));
    // Initialize the JavaScript context with 0. CEntry will use it to
    // set the current context on the isolate.
    __ Move(kContextRegister, Smi::zero());
    {
      // At this point, ebx has been spilled to the stack but is not yet
      // overwritten with another value. We can still use it as kRootRegister.
      __ CallRuntimeWithCEntry(Runtime::kWasmCompileLazy, ecx);
    }
    // The entrypoint address is the return value.
    __ mov(edi, kReturnRegister0);

    // Restore registers in reverse order of saving.
    for (DoubleRegister reg : base::Reversed(wasm::kFpParamRegisters)) {
      offset -= kSimd128Size;
      __ movdqu(reg, Operand(esp, offset));
    }
    DCHECK_EQ(0, offset);
    __ add(esp, Immediate(kSimd128Size * arraysize(wasm::kFpParamRegisters)));
    for (Register reg : base::Reversed(wasm::kGpParamRegisters)) {
      __ Pop(reg);
    }
  }
  // Finally, jump to the entrypoint.
  __ jmp(edi);
}
2507 
// The JavaScript-to-C++ transition stub. Sets up an exit frame, calls the C
// function in edx, and either returns the result or, if the call produced an
// exception, unwinds to the pending handler recorded by the runtime.
// Note: |result_size| is unused on ia32 (results fit in eax/edx:eax) —
// presumably kept for signature parity with other architectures; confirm
// against the platform-independent caller.
void Builtins::Generate_CEntry(MacroAssembler* masm, int result_size,
                               SaveFPRegsMode save_doubles, ArgvMode argv_mode,
                               bool builtin_exit_frame) {
  // eax: number of arguments including receiver
  // edx: pointer to C function
  // ebp: frame pointer (restored after C call)
  // esp: stack pointer (restored after C call)
  // esi: current context (C callee-saved)
  // edi: JS function of the caller (C callee-saved)
  //
  // If argv_mode == kArgvInRegister:
  // ecx: pointer to the first argument

  STATIC_ASSERT(eax == kRuntimeCallArgCountRegister);
  STATIC_ASSERT(ecx == kRuntimeCallArgvRegister);
  STATIC_ASSERT(edx == kRuntimeCallFunctionRegister);
  STATIC_ASSERT(esi == kContextRegister);
  STATIC_ASSERT(edi == kJSFunctionRegister);

  DCHECK(!AreAliased(kRuntimeCallArgCountRegister, kRuntimeCallArgvRegister,
                     kRuntimeCallFunctionRegister, kContextRegister,
                     kJSFunctionRegister, kRootRegister));

  // Reserve space on the stack for the three arguments (argc, argv, isolate)
  // passed to the C call below.
  int arg_stack_space = 3;

  // Enter the exit frame that transitions from JavaScript to C++.
  if (argv_mode == kArgvInRegister) {
    DCHECK(save_doubles == kDontSaveFPRegs);
    DCHECK(!builtin_exit_frame);
    __ EnterApiExitFrame(arg_stack_space, edi);

    // Move argc and argv into the correct registers.
    __ mov(esi, ecx);
    __ mov(edi, eax);
  } else {
    __ EnterExitFrame(
        arg_stack_space, save_doubles == kSaveFPRegs,
        builtin_exit_frame ? StackFrame::BUILTIN_EXIT : StackFrame::EXIT);
  }

  // edx: pointer to C function
  // ebp: frame pointer (restored after C call)
  // esp: stack pointer (restored after C call)
  // edi: number of arguments including receiver (C callee-saved)
  // esi: pointer to the first argument (C callee-saved)

  // Result returned in eax, or eax+edx if result size is 2.

  // Check stack alignment.
  if (FLAG_debug_code) {
    __ CheckStackAlignment();
  }
  // Call C function: fill the three reserved argument slots, then call.
  __ mov(Operand(esp, 0 * kPointerSize), edi);  // argc.
  __ mov(Operand(esp, 1 * kPointerSize), esi);  // argv.
  __ Move(ecx, Immediate(ExternalReference::isolate_address(masm->isolate())));
  __ mov(Operand(esp, 2 * kPointerSize), ecx);
  __ call(kRuntimeCallFunctionRegister);

  // Result is in eax or edx:eax - do not destroy these registers!

  // Check result for exception sentinel.
  Label exception_returned;
  __ CompareRoot(eax, RootIndex::kException);
  __ j(equal, &exception_returned);

  // Check that there is no pending exception, otherwise we
  // should have returned the exception sentinel.
  if (FLAG_debug_code) {
    __ push(edx);
    __ LoadRoot(edx, RootIndex::kTheHoleValue);
    Label okay;
    ExternalReference pending_exception_address = ExternalReference::Create(
        IsolateAddressId::kPendingExceptionAddress, masm->isolate());
    __ cmp(edx, __ ExternalReferenceAsOperand(pending_exception_address, ecx));
    // Cannot use check here as it attempts to generate call into runtime.
    __ j(equal, &okay, Label::kNear);
    __ int3();
    __ bind(&okay);
    __ pop(edx);
  }

  // Exit the JavaScript to C++ exit frame.
  __ LeaveExitFrame(save_doubles == kSaveFPRegs, argv_mode == kArgvOnStack);
  __ ret(0);

  // Handling of exception.
  __ bind(&exception_returned);

  // The runtime publishes the handler's context/entrypoint/fp/sp through
  // these isolate-global slots; read them after the unwind call below.
  ExternalReference pending_handler_context_address = ExternalReference::Create(
      IsolateAddressId::kPendingHandlerContextAddress, masm->isolate());
  ExternalReference pending_handler_entrypoint_address =
      ExternalReference::Create(
          IsolateAddressId::kPendingHandlerEntrypointAddress, masm->isolate());
  ExternalReference pending_handler_fp_address = ExternalReference::Create(
      IsolateAddressId::kPendingHandlerFPAddress, masm->isolate());
  ExternalReference pending_handler_sp_address = ExternalReference::Create(
      IsolateAddressId::kPendingHandlerSPAddress, masm->isolate());

  // Ask the runtime for help to determine the handler. This will set eax to
  // contain the current pending exception, don't clobber it.
  ExternalReference find_handler =
      ExternalReference::Create(Runtime::kUnwindAndFindExceptionHandler);
  {
    FrameScope scope(masm, StackFrame::MANUAL);
    __ PrepareCallCFunction(3, eax);
    __ mov(Operand(esp, 0 * kPointerSize), Immediate(0));  // argc.
    __ mov(Operand(esp, 1 * kPointerSize), Immediate(0));  // argv.
    __ Move(esi,
            Immediate(ExternalReference::isolate_address(masm->isolate())));
    __ mov(Operand(esp, 2 * kPointerSize), esi);
    __ CallCFunction(find_handler, 3);
  }

  // Retrieve the handler context, SP and FP.
  __ mov(esp, __ ExternalReferenceAsOperand(pending_handler_sp_address, esi));
  __ mov(ebp, __ ExternalReferenceAsOperand(pending_handler_fp_address, esi));
  __ mov(esi,
         __ ExternalReferenceAsOperand(pending_handler_context_address, esi));

  // If the handler is a JS frame, restore the context to the frame. Note that
  // the context will be set to (esi == 0) for non-JS frames.
  Label skip;
  __ test(esi, esi);
  __ j(zero, &skip, Label::kNear);
  __ mov(Operand(ebp, StandardFrameConstants::kContextOffset), esi);
  __ bind(&skip);

  // Compute the handler entry address and jump to it.
  __ mov(edi, __ ExternalReferenceAsOperand(pending_handler_entrypoint_address,
                                            edi));
  __ jmp(edi);
}
2645 
// Truncates the double argument (passed on the stack) to a 32-bit integer,
// writing the result back into the argument slot. Uses FISTTP when SSE3 is
// available; otherwise extracts the result by shifting the raw mantissa bits.
void Builtins::Generate_DoubleToI(MacroAssembler* masm) {
  Label check_negative, process_64_bits, done;

  // Account for return address and saved regs (ecx, scratch1, save_reg pushed
  // below = 3 slots + 1 return address).
  const int kArgumentOffset = 4 * kPointerSize;

  // The double argument is read as two 32-bit halves: mantissa (low word) and
  // exponent/sign (high word).
  MemOperand mantissa_operand(MemOperand(esp, kArgumentOffset));
  MemOperand exponent_operand(
      MemOperand(esp, kArgumentOffset + kDoubleSize / 2));

  // The result is returned on the stack.
  MemOperand return_operand = mantissa_operand;

  Register scratch1 = ebx;

  // Since we must use ecx for shifts below, use some other register (eax)
  // to calculate the result.
  Register result_reg = eax;
  // Save the registers clobbered below. NOTE(review): the comment on other
  // ports distinguishes whether ecx is the return register; here save_reg is
  // unconditionally eax and all three of ecx/ebx/eax are saved and restored.
  Register save_reg = eax;
  __ push(ecx);
  __ push(scratch1);
  __ push(save_reg);

  __ mov(scratch1, mantissa_operand);
  if (CpuFeatures::IsSupported(SSE3)) {
    CpuFeatureScope scope(masm, SSE3);
    // Load x87 register with heap number.
    __ fld_d(mantissa_operand);
  }
  __ mov(ecx, exponent_operand);

  // Extract the unbiased exponent into result_reg.
  __ and_(ecx, HeapNumber::kExponentMask);
  __ shr(ecx, HeapNumber::kExponentShift);
  __ lea(result_reg, MemOperand(ecx, -HeapNumber::kExponentBias));
  __ cmp(result_reg, Immediate(HeapNumber::kMantissaBits));
  __ j(below, &process_64_bits);

  // Result is entirely in lower 32-bits of mantissa
  int delta = HeapNumber::kExponentBias + Double::kPhysicalSignificandSize;
  if (CpuFeatures::IsSupported(SSE3)) {
    __ fstp(0);  // Discard the value loaded above; it is not needed here.
  }
  __ sub(ecx, Immediate(delta));
  __ xor_(result_reg, result_reg);
  __ cmp(ecx, Immediate(31));
  __ j(above, &done);  // Shift >= 32 truncates to 0 (mod 2^32).
  __ shl_cl(scratch1);
  __ jmp(&check_negative);

  __ bind(&process_64_bits);
  if (CpuFeatures::IsSupported(SSE3)) {
    CpuFeatureScope scope(masm, SSE3);
    // Reserve space for 64 bit answer.
    __ sub(esp, Immediate(kDoubleSize));  // Nolint.
    // Do conversion, which cannot fail because we checked the exponent.
    __ fisttp_d(Operand(esp, 0));
    __ mov(result_reg, Operand(esp, 0));  // Load low word of answer as result
    __ add(esp, Immediate(kDoubleSize));
    __ jmp(&done);
  } else {
    // Result must be extracted from shifted 32-bit mantissa
    __ sub(ecx, Immediate(delta));
    __ neg(ecx);
    __ mov(result_reg, exponent_operand);
    // Isolate the high mantissa bits and restore the implicit leading 1.
    __ and_(result_reg,
            Immediate(static_cast<uint32_t>(Double::kSignificandMask >> 32)));
    __ add(result_reg,
           Immediate(static_cast<uint32_t>(Double::kHiddenBit >> 32)));
    // 64-bit right shift of result_reg:scratch1 by ecx (mod 32), then select
    // the correct half depending on whether the count was >= 32.
    __ shrd_cl(scratch1, result_reg);
    __ shr_cl(result_reg);
    __ test(ecx, Immediate(32));
    __ cmov(not_equal, scratch1, result_reg);
  }

  // If the double was negative, negate the integer result.
  __ bind(&check_negative);
  __ mov(result_reg, scratch1);
  __ neg(result_reg);
  __ cmp(exponent_operand, Immediate(0));
  __ cmov(greater, result_reg, scratch1);  // Keep unnegated value if positive.

  // Restore registers
  __ bind(&done);
  __ mov(return_operand, result_reg);
  __ pop(save_reg);
  __ pop(scratch1);
  __ pop(ecx);
  __ ret(0);
}
2738 
// Computes double_base ** double_exponent into double_result.
// Strategy: if the exponent converts exactly to an int32, use repeated
// squaring; otherwise bail out to the C runtime (the x87 fast path at
// &fast_power is only reachable via that label; presumably kept for
// completeness — confirm against other ports).
void Builtins::Generate_MathPowInternal(MacroAssembler* masm) {
  const Register exponent = eax;
  const Register scratch = ecx;
  const XMMRegister double_result = xmm3;
  const XMMRegister double_base = xmm2;
  const XMMRegister double_exponent = xmm1;
  const XMMRegister double_scratch = xmm4;

  Label call_runtime, done, exponent_not_smi, int_exponent;

  // Save 1 in double_result - we need this several times later on.
  __ mov(scratch, Immediate(1));
  __ Cvtsi2sd(double_result, scratch);

  Label fast_power, try_arithmetic_simplification;
  // Try to truncate the exponent to an int32; on failure check whether the
  // truncation overflowed (NaN/out-of-range) before giving up.
  __ DoubleToI(exponent, double_exponent, double_scratch,
               &try_arithmetic_simplification, &try_arithmetic_simplification);
  __ jmp(&int_exponent);

  __ bind(&try_arithmetic_simplification);
  // Skip to runtime if possibly NaN (indicated by the indefinite integer).
  __ cvttsd2si(exponent, Operand(double_exponent));
  __ cmp(exponent, Immediate(0x1));
  __ j(overflow, &call_runtime);

  // Using FPU instructions to calculate power.
  Label fast_power_failed;
  __ bind(&fast_power);
  __ fnclex();  // Clear flags to catch exceptions later.
  // Transfer (B)ase and (E)xponent onto the FPU register stack.
  __ sub(esp, Immediate(kDoubleSize));
  __ movsd(Operand(esp, 0), double_exponent);
  __ fld_d(Operand(esp, 0));  // E
  __ movsd(Operand(esp, 0), double_base);
  __ fld_d(Operand(esp, 0));  // B, E

  // Exponent is in st(1) and base is in st(0)
  // B ^ E = (2^(E * log2(B)) - 1) + 1 = (2^X - 1) + 1 for X = E * log2(B)
  // FYL2X calculates st(1) * log2(st(0))
  __ fyl2x();    // X
  __ fld(0);     // X, X
  __ frndint();  // rnd(X), X
  __ fsub(1);    // rnd(X), X-rnd(X)
  __ fxch(1);    // X - rnd(X), rnd(X)
  // F2XM1 calculates 2^st(0) - 1 for -1 < st(0) < 1
  __ f2xm1();   // 2^(X-rnd(X)) - 1, rnd(X)
  __ fld1();    // 1, 2^(X-rnd(X)) - 1, rnd(X)
  __ faddp(1);  // 2^(X-rnd(X)), rnd(X)
  // FSCALE calculates st(0) * 2^st(1)
  __ fscale();  // 2^X, rnd(X)
  __ fstp(1);   // 2^X
  // Bail out to runtime in case of exceptions in the status word.
  __ fnstsw_ax();
  __ test_b(eax, Immediate(0x5F));  // We check for all but precision exception.
  __ j(not_zero, &fast_power_failed, Label::kNear);
  __ fstp_d(Operand(esp, 0));
  __ movsd(double_result, Operand(esp, 0));
  __ add(esp, Immediate(kDoubleSize));
  __ jmp(&done);

  __ bind(&fast_power_failed);
  __ fninit();  // Reset the FPU after the failed computation.
  __ add(esp, Immediate(kDoubleSize));
  __ jmp(&call_runtime);

  // Calculate power with integer exponent (square-and-multiply).
  __ bind(&int_exponent);
  const XMMRegister double_scratch2 = double_exponent;
  __ mov(scratch, exponent);                // Back up exponent.
  __ movsd(double_scratch, double_base);    // Back up base.
  __ movsd(double_scratch2, double_result);  // Load double_exponent with 1.

  // Get absolute value of exponent.
  Label no_neg, while_true, while_false;
  __ test(scratch, scratch);
  __ j(positive, &no_neg, Label::kNear);
  __ neg(scratch);
  __ bind(&no_neg);

  __ j(zero, &while_false, Label::kNear);
  __ shr(scratch, 1);
  // Above condition means CF==0 && ZF==0. This means that the
  // bit that has been shifted out is 0 and the result is not 0.
  __ j(above, &while_true, Label::kNear);
  // Lowest bit was set: fold the current base into the result.
  __ movsd(double_result, double_scratch);
  __ j(zero, &while_false, Label::kNear);

  __ bind(&while_true);
  __ shr(scratch, 1);
  __ mulsd(double_scratch, double_scratch);  // Square the base each round.
  __ j(above, &while_true, Label::kNear);
  __ mulsd(double_result, double_scratch);
  __ j(not_zero, &while_true);

  __ bind(&while_false);
  // scratch has the original value of the exponent - if the exponent is
  // negative, return 1/result.
  __ test(exponent, exponent);
  __ j(positive, &done);
  __ divsd(double_scratch2, double_result);
  __ movsd(double_result, double_scratch2);
  // Test whether result is zero. Bail out to check for subnormal result.
  // Due to subnormals, x^-y == (1/x)^y does not hold in all cases.
  __ xorps(double_scratch2, double_scratch2);
  __ ucomisd(double_scratch2, double_result);  // Result cannot be NaN.
  // double_exponent aliased as double_scratch2 has already been overwritten
  // and may not have contained the exponent value in the first place when the
  // exponent is a smi. We reset it with exponent value before bailing out.
  __ j(not_equal, &done);
  __ Cvtsi2sd(double_exponent, exponent);

  // Returning or bailing out.
  __ bind(&call_runtime);
  {
    AllowExternalCallThatCantCauseGC scope(masm);
    __ PrepareCallCFunction(4, scratch);
    __ movsd(Operand(esp, 0 * kDoubleSize), double_base);
    __ movsd(Operand(esp, 1 * kDoubleSize), double_exponent);
    __ CallCFunction(ExternalReference::power_double_double_function(), 4);
  }
  // Return value is in st(0) on ia32.
  // Store it into the (fixed) result register.
  __ sub(esp, Immediate(kDoubleSize));
  __ fstp_d(Operand(esp, 0));
  __ movsd(double_result, Operand(esp, 0));
  __ add(esp, Immediate(kDoubleSize));

  __ bind(&done);
  __ ret(0);
}
2869 
2870 namespace {
2871 
2872 void GenerateInternalArrayConstructorCase(MacroAssembler* masm,
2873  ElementsKind kind) {
2874  Label not_zero_case, not_one_case;
2875  Label normal_sequence;
2876 
2877  __ test(eax, eax);
2878  __ j(not_zero, &not_zero_case);
2879  __ Jump(CodeFactory::InternalArrayNoArgumentConstructor(masm->isolate(), kind)
2880  .code(),
2881  RelocInfo::CODE_TARGET);
2882 
2883  __ bind(&not_zero_case);
2884  __ cmp(eax, 1);
2885  __ j(greater, &not_one_case);
2886 
2887  if (IsFastPackedElementsKind(kind)) {
2888  // We might need to create a holey array
2889  // look at the first argument
2890  __ mov(ecx, Operand(esp, kPointerSize));
2891  __ test(ecx, ecx);
2892  __ j(zero, &normal_sequence);
2893 
2894  __ Jump(CodeFactory::InternalArraySingleArgumentConstructor(
2895  masm->isolate(), GetHoleyElementsKind(kind))
2896  .code(),
2897  RelocInfo::CODE_TARGET);
2898  }
2899 
2900  __ bind(&normal_sequence);
2901  __ Jump(
2902  CodeFactory::InternalArraySingleArgumentConstructor(masm->isolate(), kind)
2903  .code(),
2904  RelocInfo::CODE_TARGET);
2905 
2906  __ bind(&not_one_case);
2907  // Load undefined into the allocation site parameter as required by
2908  // ArrayNArgumentsConstructor.
2909  __ LoadRoot(kJavaScriptCallExtraArg1Register, RootIndex::kUndefinedValue);
2910  Handle<Code> code = BUILTIN_CODE(masm->isolate(), ArrayNArgumentsConstructor);
2911  __ Jump(code, RelocInfo::CODE_TARGET);
2912 }
2913 
2914 } // namespace
2915 
// Entry point for InternalArray construction: reads the elements kind from
// the constructor's initial map and dispatches to the matching
// GenerateInternalArrayConstructorCase (packed or holey).
void Builtins::Generate_InternalArrayConstructorImpl(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- eax    : argc
  //  -- edi    : constructor
  //  -- esp[0] : return address
  //  -- esp[4] : last argument
  // -----------------------------------

  if (FLAG_debug_code) {
    // The array construct code is only set for the global and natives
    // builtin Array functions which always have maps.

    // Initial map for the builtin Array function should be a map.
    __ mov(ecx, FieldOperand(edi, JSFunction::kPrototypeOrInitialMapOffset));
    // Will both indicate a nullptr and a Smi.
    __ test(ecx, Immediate(kSmiTagMask));
    __ Assert(not_zero, AbortReason::kUnexpectedInitialMapForArrayFunction);
    __ CmpObjectType(ecx, MAP_TYPE, ecx);
    __ Assert(equal, AbortReason::kUnexpectedInitialMapForArrayFunction);
  }

  // Figure out the right elements kind
  __ mov(ecx, FieldOperand(edi, JSFunction::kPrototypeOrInitialMapOffset));

  // Load the map's "bit field 2" into |result|. We only need the first byte,
  // but the following masking takes care of that anyway.
  __ mov(ecx, FieldOperand(ecx, Map::kBitField2Offset));
  // Retrieve elements_kind from bit field 2.
  __ DecodeField<Map::ElementsKindBits>(ecx);

  if (FLAG_debug_code) {
    // Internal arrays only ever use PACKED_ELEMENTS or HOLEY_ELEMENTS.
    Label done;
    __ cmp(ecx, Immediate(PACKED_ELEMENTS));
    __ j(equal, &done);
    __ cmp(ecx, Immediate(HOLEY_ELEMENTS));
    __ Assert(
        equal,
        AbortReason::kInvalidElementsKindForInternalArrayOrInternalPackedArray);
    __ bind(&done);
  }

  // Dispatch on the elements kind.
  Label fast_elements_case;
  __ cmp(ecx, Immediate(PACKED_ELEMENTS));
  __ j(equal, &fast_elements_case);
  GenerateInternalArrayConstructorCase(masm, HOLEY_ELEMENTS);

  __ bind(&fast_elements_case);
  GenerateInternalArrayConstructorCase(masm, PACKED_ELEMENTS);
}
2965 
2966 #undef __
2967 
2968 } // namespace internal
2969 } // namespace v8
2970 
2971 #endif // V8_TARGET_ARCH_IA32
Definition: libplatform.h:13