V8 API Reference, 7.2.502.16 (for Deno 0.2.4)
builtins-mips64.cc
1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
4 
5 #if V8_TARGET_ARCH_MIPS64
6 
7 #include "src/code-factory.h"
8 #include "src/code-stubs.h"
9 #include "src/counters.h"
10 #include "src/debug/debug.h"
11 #include "src/deoptimizer.h"
12 #include "src/frame-constants.h"
13 #include "src/frames.h"
14 #include "src/mips64/constants-mips64.h"
15 #include "src/objects-inl.h"
16 #include "src/objects/js-generator.h"
17 #include "src/objects/smi.h"
18 #include "src/register-configuration.h"
19 #include "src/runtime/runtime.h"
20 #include "src/wasm/wasm-objects.h"
21 
22 namespace v8 {
23 namespace internal {
24 
25 #define __ ACCESS_MASM(masm)
26 
27 void Builtins::Generate_Adaptor(MacroAssembler* masm, Address address,
28  ExitFrameType exit_frame_type) {
29  __ li(kJavaScriptCallExtraArg1Register, ExternalReference::Create(address));
30  if (exit_frame_type == BUILTIN_EXIT) {
31  __ Jump(BUILTIN_CODE(masm->isolate(), AdaptorWithBuiltinExitFrame),
32  RelocInfo::CODE_TARGET);
33  } else {
34  DCHECK(exit_frame_type == EXIT);
35  __ Jump(BUILTIN_CODE(masm->isolate(), AdaptorWithExitFrame),
36  RelocInfo::CODE_TARGET);
37  }
38 }
39 
40 void Builtins::Generate_InternalArrayConstructor(MacroAssembler* masm) {
41  // ----------- S t a t e -------------
42  // -- a0 : number of arguments
43  // -- ra : return address
44  // -- sp[...]: constructor arguments
45  // -----------------------------------
46  Label generic_array_code, one_or_more_arguments, two_or_more_arguments;
47 
48  if (FLAG_debug_code) {
49  // Initial map for the builtin InternalArray functions should be maps.
50  __ Ld(a2, FieldMemOperand(a1, JSFunction::kPrototypeOrInitialMapOffset));
51  __ SmiTst(a2, a4);
52  __ Assert(ne, AbortReason::kUnexpectedInitialMapForInternalArrayFunction,
53  a4, Operand(zero_reg));
54  __ GetObjectType(a2, a3, a4);
55  __ Assert(eq, AbortReason::kUnexpectedInitialMapForInternalArrayFunction,
56  a4, Operand(MAP_TYPE));
57  }
58 
59  // Run the native code for the InternalArray function called as a normal
60  // function.
61  __ Jump(BUILTIN_CODE(masm->isolate(), InternalArrayConstructorImpl),
62  RelocInfo::CODE_TARGET);
63 }
64 
65 static void GenerateTailCallToReturnedCode(MacroAssembler* masm,
66  Runtime::FunctionId function_id) {
67  // ----------- S t a t e -------------
68  // -- a0 : argument count (preserved for callee)
69  // -- a1 : target function (preserved for callee)
70  // -- a3 : new target (preserved for callee)
71  // -----------------------------------
72  {
73  FrameScope scope(masm, StackFrame::INTERNAL);
74  // Push the Smi-tagged argument count, the target function and the new target.
75  // Push the target function again as the parameter to the runtime call.
76  __ SmiTag(a0);
77  __ Push(a0, a1, a3, a1);
78 
79  __ CallRuntime(function_id, 1);
80  // Restore target function and new target.
81  __ Pop(a0, a1, a3);
82  __ SmiUntag(a0);
83  }
84 
85  static_assert(kJavaScriptCallCodeStartRegister == a2, "ABI mismatch");
86  __ Daddu(a2, v0, Operand(Code::kHeaderSize - kHeapObjectTag));
87  __ Jump(a2);
88 }
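The SmiTag/SmiUntag pair above moves the raw argument count into and out of V8's small-integer (Smi) encoding so it survives the runtime call as a tagged value. A minimal standalone sketch of that encoding, assuming the 64-bit layout in which the 32-bit payload sits in the upper word and the low tag bit stays clear (hypothetical helpers, not V8's actual ones):

#include <cassert>
#include <cstdint>

// Hypothetical helpers mirroring the assumed 64-bit Smi layout:
// payload in bits 32..63, tag bit(s) zero in the low word.
int64_t SmiTag(int32_t value) { return static_cast<int64_t>(value) << 32; }
int32_t SmiUntag(int64_t smi) { return static_cast<int32_t>(smi >> 32); }

int main() {
  int64_t tagged = SmiTag(3);     // argument count, as pushed on the stack
  assert((tagged & 1) == 0);      // a clear low bit marks a Smi, not a heap pointer
  assert(SmiUntag(tagged) == 3);  // restored after the runtime call returns
  return 0;
}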
89 
90 namespace {
91 
92 void Generate_JSBuiltinsConstructStubHelper(MacroAssembler* masm) {
93  // ----------- S t a t e -------------
94  // -- a0 : number of arguments
95  // -- a1 : constructor function
96  // -- a3 : new target
97  // -- cp : context
98  // -- ra : return address
99  // -- sp[...]: constructor arguments
100  // -----------------------------------
101 
102  // Enter a construct frame.
103  {
104  FrameScope scope(masm, StackFrame::CONSTRUCT);
105 
106  // Preserve the incoming parameters on the stack.
107  __ SmiTag(a0);
108  __ Push(cp, a0);
109  __ SmiUntag(a0);
110 
111  // The receiver for the builtin/api call.
112  __ PushRoot(RootIndex::kTheHoleValue);
113 
114  // Set up pointer to last argument.
115  __ Daddu(t2, fp, Operand(StandardFrameConstants::kCallerSPOffset));
116 
117  // Copy arguments and receiver to the expression stack.
118  Label loop, entry;
119  __ mov(t3, a0);
120  // ----------- S t a t e -------------
121  // -- a0: number of arguments (untagged)
122  // -- a3: new target
123  // -- t2: pointer to last argument
124  // -- t3: counter
125  // -- sp[0*kPointerSize]: the hole (receiver)
126  // -- sp[1*kPointerSize]: number of arguments (tagged)
127  // -- sp[2*kPointerSize]: context
128  // -----------------------------------
129  __ jmp(&entry);
130  __ bind(&loop);
131  __ Dlsa(t0, t2, t3, kPointerSizeLog2);
132  __ Ld(t1, MemOperand(t0));
133  __ push(t1);
134  __ bind(&entry);
135  __ Daddu(t3, t3, Operand(-1));
136  __ Branch(&loop, greater_equal, t3, Operand(zero_reg));
137 
138  // Call the function.
139  // a0: number of arguments (untagged)
140  // a1: constructor function
141  // a3: new target
142  ParameterCount actual(a0);
143  __ InvokeFunction(a1, a3, actual, CALL_FUNCTION);
144 
145  // Restore context from the frame.
146  __ Ld(cp, MemOperand(fp, ConstructFrameConstants::kContextOffset));
147  // Restore smi-tagged arguments count from the frame.
148  __ Ld(a1, MemOperand(fp, ConstructFrameConstants::kLengthOffset));
149  // Leave construct frame.
150  }
151 
152  // Remove caller arguments from the stack and return.
153  __ SmiScale(a4, a1, kPointerSizeLog2);
154  __ Daddu(sp, sp, a4);
155  __ Daddu(sp, sp, kPointerSize);
156  __ Ret();
157 }
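The frame teardown at the end of the helper drops the caller's arguments plus the receiver slot without untagging the count separately: SmiScale shifts the Smi payload straight into a byte offset. A small sketch of that adjustment, assuming the same 64-bit Smi layout as above (made-up addresses, not real stack values):

#include <cassert>
#include <cstdint>

int main() {
  const uint64_t kPointerSize = 8;                 // MIPS64
  const int kPointerSizeLog2 = 3;
  const int kSmiShift = 32;                        // assumed 64-bit Smi layout
  int64_t tagged_count = int64_t{4} << kSmiShift;  // Smi-tagged argument count (4)

  // SmiScale(a4, a1, kPointerSizeLog2): one arithmetic shift turns the tagged
  // count into a byte count (>> 32 to untag, << 3 to scale, i.e. >> 29 overall).
  int64_t arg_bytes = tagged_count >> (kSmiShift - kPointerSizeLog2);

  uint64_t sp = 0x10000;                           // made-up stack pointer
  sp += arg_bytes;                                 // drop the arguments
  sp += kPointerSize;                              // drop the receiver slot
  assert(sp == 0x10000 + 4 * kPointerSize + kPointerSize);
  return 0;
}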
158 
159 static void Generate_StackOverflowCheck(MacroAssembler* masm, Register num_args,
160  Register scratch1, Register scratch2,
161  Label* stack_overflow) {
162  // Check the stack for overflow. We are not trying to catch
163  // interruptions (e.g. debug break and preemption) here, so the "real stack
164  // limit" is checked.
165  __ LoadRoot(scratch1, RootIndex::kRealStackLimit);
166  // Make scratch1 the space we have left. The stack might already have
167  // overflowed here, which will cause scratch1 to become negative.
168  __ dsubu(scratch1, sp, scratch1);
169  // Check if the arguments will overflow the stack.
170  __ dsll(scratch2, num_args, kPointerSizeLog2);
171  // Signed comparison.
172  __ Branch(stack_overflow, le, scratch1, Operand(scratch2));
173 }
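In plain terms, the check computes how much room is left between sp and the "real" stack limit and compares it against the space the arguments will need, using a signed comparison so an already-overflowed stack also takes the branch. A rough standalone sketch of the same arithmetic (limits and addresses are stand-ins):

#include <cstdint>
#include <cstdio>

// Returns true if pushing num_args pointer-sized slots would cross the limit.
bool WouldOverflow(uintptr_t sp, uintptr_t real_stack_limit, uint64_t num_args) {
  const uint64_t kPointerSize = 8;  // MIPS64
  int64_t space_left = static_cast<int64_t>(sp - real_stack_limit);
  int64_t space_needed = static_cast<int64_t>(num_args * kPointerSize);
  // Signed comparison, as in Branch(..., le, ...): a negative space_left
  // (stack already past the limit) always reports an overflow.
  return space_left <= space_needed;
}

int main() {
  printf("%d\n", WouldOverflow(0x8000, 0x7000, 100));  // 0: enough room
  printf("%d\n", WouldOverflow(0x7010, 0x7000, 100));  // 1: would overflow
  return 0;
}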
174 
175 } // namespace
176 
177 // The construct stub for ES5 constructor functions and ES6 class constructors.
178 void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) {
179  // ----------- S t a t e -------------
180  // -- a0: number of arguments (untagged)
181  // -- a1: constructor function
182  // -- a3: new target
183  // -- cp: context
184  // -- ra: return address
185  // -- sp[...]: constructor arguments
186  // -----------------------------------
187 
188  // Enter a construct frame.
189  {
190  FrameScope scope(masm, StackFrame::CONSTRUCT);
191  Label post_instantiation_deopt_entry, not_create_implicit_receiver;
192 
193  // Preserve the incoming parameters on the stack.
194  __ SmiTag(a0);
195  __ Push(cp, a0, a1);
196  __ PushRoot(RootIndex::kTheHoleValue);
197  __ Push(a3);
198 
199  // ----------- S t a t e -------------
200  // -- sp[0*kPointerSize]: new target
201  // -- sp[1*kPointerSize]: padding
202  // -- a1 and sp[2*kPointerSize]: constructor function
203  // -- sp[3*kPointerSize]: number of arguments (tagged)
204  // -- sp[4*kPointerSize]: context
205  // -----------------------------------
206 
207  __ Ld(t2, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset));
208  __ lwu(t2, FieldMemOperand(t2, SharedFunctionInfo::kFlagsOffset));
209  __ And(t2, t2, Operand(SharedFunctionInfo::IsDerivedConstructorBit::kMask));
210  __ Branch(&not_create_implicit_receiver, ne, t2, Operand(zero_reg));
211 
212  // If not derived class constructor: Allocate the new receiver object.
213  __ IncrementCounter(masm->isolate()->counters()->constructed_objects(), 1,
214  t2, t3);
215  __ Call(BUILTIN_CODE(masm->isolate(), FastNewObject),
216  RelocInfo::CODE_TARGET);
217  __ Branch(&post_instantiation_deopt_entry);
218 
219  // Else: use TheHoleValue as receiver for constructor call
220  __ bind(&not_create_implicit_receiver);
221  __ LoadRoot(v0, RootIndex::kTheHoleValue);
222 
223  // ----------- S t a t e -------------
224  // -- v0: receiver
225  // -- Slot 4 / sp[0*kPointerSize]: new target
226  // -- Slot 3 / sp[1*kPointerSize]: padding
227  // -- Slot 2 / sp[2*kPointerSize]: constructor function
228  // -- Slot 1 / sp[3*kPointerSize]: number of arguments (tagged)
229  // -- Slot 0 / sp[4*kPointerSize]: context
230  // -----------------------------------
231  // Deoptimizer enters here.
232  masm->isolate()->heap()->SetConstructStubCreateDeoptPCOffset(
233  masm->pc_offset());
234  __ bind(&post_instantiation_deopt_entry);
235 
236  // Restore new target.
237  __ Pop(a3);
238  // Push the allocated receiver to the stack. We need two copies
239  // because we may have to return the original one and the calling
240  // conventions dictate that the called function pops the receiver.
241  __ Push(v0, v0);
242 
243  // ----------- S t a t e -------------
244  // -- a3: new target
245  // -- sp[0*kPointerSize]: implicit receiver
246  // -- sp[1*kPointerSize]: implicit receiver
247  // -- sp[2*kPointerSize]: padding
248  // -- sp[3*kPointerSize]: constructor function
249  // -- sp[4*kPointerSize]: number of arguments (tagged)
250  // -- sp[5*kPointerSize]: context
251  // -----------------------------------
252 
253  // Restore constructor function and argument count.
254  __ Ld(a1, MemOperand(fp, ConstructFrameConstants::kConstructorOffset));
255  __ Ld(a0, MemOperand(fp, ConstructFrameConstants::kLengthOffset));
256  __ SmiUntag(a0);
257 
258  // Set up pointer to last argument.
259  __ Daddu(t2, fp, Operand(StandardFrameConstants::kCallerSPOffset));
260 
261  Label enough_stack_space, stack_overflow;
262  Generate_StackOverflowCheck(masm, a0, t0, t1, &stack_overflow);
263  __ Branch(&enough_stack_space);
264 
265  __ bind(&stack_overflow);
266  // Restore the context from the frame.
267  __ Ld(cp, MemOperand(fp, ConstructFrameConstants::kContextOffset));
268  __ CallRuntime(Runtime::kThrowStackOverflow);
269  // Unreachable code.
270  __ break_(0xCC);
271 
272  __ bind(&enough_stack_space);
273 
274  // Copy arguments and receiver to the expression stack.
275  Label loop, entry;
276  __ mov(t3, a0);
277  // ----------- S t a t e -------------
278  // -- a0: number of arguments (untagged)
279  // -- a3: new target
280  // -- t2: pointer to last argument
281  // -- t3: counter
282  // -- sp[0*kPointerSize]: implicit receiver
283  // -- sp[1*kPointerSize]: implicit receiver
284  // -- sp[2*kPointerSize]: padding
285  // -- a1 and sp[3*kPointerSize]: constructor function
286  // -- sp[4*kPointerSize]: number of arguments (tagged)
287  // -- sp[5*kPointerSize]: context
288  // -----------------------------------
289  __ jmp(&entry);
290  __ bind(&loop);
291  __ Dlsa(t0, t2, t3, kPointerSizeLog2);
292  __ Ld(t1, MemOperand(t0));
293  __ push(t1);
294  __ bind(&entry);
295  __ Daddu(t3, t3, Operand(-1));
296  __ Branch(&loop, greater_equal, t3, Operand(zero_reg));
297 
298  // Call the function.
299  ParameterCount actual(a0);
300  __ InvokeFunction(a1, a3, actual, CALL_FUNCTION);
301 
302  // ----------- S t a t e -------------
303  // -- v0: constructor result
304  // -- sp[0*kPointerSize]: implicit receiver
305  // -- sp[1*kPointerSize]: padding
306  // -- sp[2*kPointerSize]: constructor function
307  // -- sp[3*kPointerSize]: number of arguments
308  // -- sp[4*kPointerSize]: context
309  // -----------------------------------
310 
311  // Store offset of return address for deoptimizer.
312  masm->isolate()->heap()->SetConstructStubInvokeDeoptPCOffset(
313  masm->pc_offset());
314 
315  // Restore the context from the frame.
316  __ Ld(cp, MemOperand(fp, ConstructFrameConstants::kContextOffset));
317 
318  // If the result is an object (in the ECMA sense), we should get rid
319  // of the receiver and use the result; see ECMA-262 section 13.2.2-7
320  // on page 74.
321  Label use_receiver, do_throw, leave_frame;
322 
323  // If the result is undefined, we jump out to using the implicit receiver.
324  __ JumpIfRoot(v0, RootIndex::kUndefinedValue, &use_receiver);
325 
326  // Otherwise we do a smi check and fall through to check if the return value
327  // is a valid receiver.
328 
329  // If the result is a smi, it is *not* an object in the ECMA sense.
330  __ JumpIfSmi(v0, &use_receiver);
331 
332  // If the type of the result (stored in its map) is less than
333  // FIRST_JS_RECEIVER_TYPE, it is not an object in the ECMA sense.
334  __ GetObjectType(v0, t2, t2);
335  STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
336  __ Branch(&leave_frame, greater_equal, t2, Operand(FIRST_JS_RECEIVER_TYPE));
337  __ Branch(&use_receiver);
338 
339  __ bind(&do_throw);
340  __ CallRuntime(Runtime::kThrowConstructorReturnedNonObject);
341 
342  // Throw away the result of the constructor invocation and use the
343  // on-stack receiver as the result.
344  __ bind(&use_receiver);
345  __ Ld(v0, MemOperand(sp, 0 * kPointerSize));
346  __ JumpIfRoot(v0, RootIndex::kTheHoleValue, &do_throw);
347 
348  __ bind(&leave_frame);
349  // Restore smi-tagged arguments count from the frame.
350  __ Ld(a1, MemOperand(fp, ConstructFrameConstants::kLengthOffset));
351  // Leave construct frame.
352  }
353  // Remove caller arguments from the stack and return.
354  __ SmiScale(a4, a1, kPointerSizeLog2);
355  __ Daddu(sp, sp, a4);
356  __ Daddu(sp, sp, kPointerSize);
357  __ Ret();
358 }
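The tail of the generic construct stub implements the usual [[Construct]] result selection: keep the constructor's return value only when it is an object, otherwise fall back to the on-stack receiver, and throw when a derived constructor left the hole there. A simplified sketch of that decision, with hypothetical tags standing in for V8's object model:

#include <stdexcept>

// Hypothetical stand-ins for tagged values; not V8's real type hierarchy.
enum class Kind { kSmi, kUndefined, kTheHole, kJSReceiver, kOtherHeapObject };
struct Value { Kind kind; };

Value SelectConstructResult(Value result, Value stack_receiver) {
  // leave_frame: an object result replaces the implicit receiver.
  if (result.kind == Kind::kJSReceiver) return result;
  // use_receiver: primitives fall back to the receiver saved on the stack.
  if (stack_receiver.kind == Kind::kTheHole)  // derived ctor never made one
    throw std::runtime_error("constructor returned non-object");  // do_throw
  return stack_receiver;
}

int main() {
  Value obj{Kind::kJSReceiver}, undef{Kind::kUndefined}, hole{Kind::kTheHole};
  SelectConstructResult(obj, hole);    // explicit object result wins
  SelectConstructResult(undef, obj);   // falls back to the receiver
  try { SelectConstructResult(undef, hole); } catch (const std::exception&) {}
  return 0;
}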
359 
360 void Builtins::Generate_JSBuiltinsConstructStub(MacroAssembler* masm) {
361  Generate_JSBuiltinsConstructStubHelper(masm);
362 }
363 
364 static void GetSharedFunctionInfoBytecode(MacroAssembler* masm,
365  Register sfi_data,
366  Register scratch1) {
367  Label done;
368 
369  __ GetObjectType(sfi_data, scratch1, scratch1);
370  __ Branch(&done, ne, scratch1, Operand(INTERPRETER_DATA_TYPE));
371  __ Ld(sfi_data,
372  FieldMemOperand(sfi_data, InterpreterData::kBytecodeArrayOffset));
373 
374  __ bind(&done);
375 }
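This helper only unwraps one level of indirection: when the SharedFunctionInfo's function_data slot holds an InterpreterData (used when a separate entry trampoline is attached for profiling), the BytecodeArray is one more load away. A hedged sketch of the same unwrapping with placeholder types:

// Placeholder types; the real objects live in V8's heap.
struct BytecodeArray {};
struct InterpreterData { BytecodeArray* bytecode_array; };

// Mirrors GetSharedFunctionInfoBytecode: pass the value through unless it is
// an InterpreterData, in which case read the BytecodeArray stored inside it.
BytecodeArray* GetBytecode(void* sfi_data, bool is_interpreter_data) {
  if (is_interpreter_data)
    return static_cast<InterpreterData*>(sfi_data)->bytecode_array;
  return static_cast<BytecodeArray*>(sfi_data);
}

int main() {
  BytecodeArray ba;
  InterpreterData id{&ba};
  return (GetBytecode(&ba, false) == &ba && GetBytecode(&id, true) == &ba) ? 0 : 1;
}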
376 
377 // static
378 void Builtins::Generate_ResumeGeneratorTrampoline(MacroAssembler* masm) {
379  // ----------- S t a t e -------------
380  // -- v0 : the value to pass to the generator
381  // -- a1 : the JSGeneratorObject to resume
382  // -- ra : return address
383  // -----------------------------------
384  __ AssertGeneratorObject(a1);
385 
386  // Store input value into generator object.
387  __ Sd(v0, FieldMemOperand(a1, JSGeneratorObject::kInputOrDebugPosOffset));
388  __ RecordWriteField(a1, JSGeneratorObject::kInputOrDebugPosOffset, v0, a3,
389  kRAHasNotBeenSaved, kDontSaveFPRegs);
390 
391  // Load suspended function and context.
392  __ Ld(a4, FieldMemOperand(a1, JSGeneratorObject::kFunctionOffset));
393  __ Ld(cp, FieldMemOperand(a4, JSFunction::kContextOffset));
394 
395  // Flood function if we are stepping.
396  Label prepare_step_in_if_stepping, prepare_step_in_suspended_generator;
397  Label stepping_prepared;
398  ExternalReference debug_hook =
399  ExternalReference::debug_hook_on_function_call_address(masm->isolate());
400  __ li(a5, debug_hook);
401  __ Lb(a5, MemOperand(a5));
402  __ Branch(&prepare_step_in_if_stepping, ne, a5, Operand(zero_reg));
403 
404  // Flood function if we need to continue stepping in the suspended generator.
405  ExternalReference debug_suspended_generator =
406  ExternalReference::debug_suspended_generator_address(masm->isolate());
407  __ li(a5, debug_suspended_generator);
408  __ Ld(a5, MemOperand(a5));
409  __ Branch(&prepare_step_in_suspended_generator, eq, a1, Operand(a5));
410  __ bind(&stepping_prepared);
411 
412  // Check the stack for overflow. We are not trying to catch interruptions
413  // (i.e. debug break and preemption) here, so check the "real stack limit".
414  Label stack_overflow;
415  __ LoadRoot(kScratchReg, RootIndex::kRealStackLimit);
416  __ Branch(&stack_overflow, lo, sp, Operand(kScratchReg));
417 
418  // Push receiver.
419  __ Ld(a5, FieldMemOperand(a1, JSGeneratorObject::kReceiverOffset));
420  __ Push(a5);
421 
422  // ----------- S t a t e -------------
423  // -- a1 : the JSGeneratorObject to resume
424  // -- a4 : generator function
425  // -- cp : generator context
426  // -- ra : return address
427  // -- sp[0] : generator receiver
428  // -----------------------------------
429 
430  // Push holes for arguments to generator function. Since the parser forced
431  // context allocation for any variables in generators, the actual argument
432  // values have already been copied into the context and these dummy values
433  // will never be used.
434  __ Ld(a3, FieldMemOperand(a4, JSFunction::kSharedFunctionInfoOffset));
435  __ Lhu(a3,
436  FieldMemOperand(a3, SharedFunctionInfo::kFormalParameterCountOffset));
437  __ Ld(t1,
438  FieldMemOperand(a1, JSGeneratorObject::kParametersAndRegistersOffset));
439  {
440  Label done_loop, loop;
441  __ Move(t2, zero_reg);
442  __ bind(&loop);
443  __ Dsubu(a3, a3, Operand(1));
444  __ Branch(&done_loop, lt, a3, Operand(zero_reg));
445  __ Dlsa(kScratchReg, t1, t2, kPointerSizeLog2);
446  __ Ld(kScratchReg, FieldMemOperand(kScratchReg, FixedArray::kHeaderSize));
447  __ Push(kScratchReg);
448  __ Daddu(t2, t2, Operand(1));
449  __ Branch(&loop);
450  __ bind(&done_loop);
451  }
452 
453  // Underlying function needs to have bytecode available.
454  if (FLAG_debug_code) {
455  __ Ld(a3, FieldMemOperand(a4, JSFunction::kSharedFunctionInfoOffset));
456  __ Ld(a3, FieldMemOperand(a3, SharedFunctionInfo::kFunctionDataOffset));
457  GetSharedFunctionInfoBytecode(masm, a3, a0);
458  __ GetObjectType(a3, a3, a3);
459  __ Assert(eq, AbortReason::kMissingBytecodeArray, a3,
460  Operand(BYTECODE_ARRAY_TYPE));
461  }
462 
463  // Resume (Ignition/TurboFan) generator object.
464  {
465  __ Ld(a0, FieldMemOperand(a4, JSFunction::kSharedFunctionInfoOffset));
466  __ Lhu(a0, FieldMemOperand(
467  a0, SharedFunctionInfo::kFormalParameterCountOffset));
468  // We abuse new.target both to indicate that this is a resume call and to
469  // pass in the generator object. In ordinary calls, new.target is always
470  // undefined because generator functions are non-constructable.
471  __ Move(a3, a1);
472  __ Move(a1, a4);
473  static_assert(kJavaScriptCallCodeStartRegister == a2, "ABI mismatch");
474  __ Ld(a2, FieldMemOperand(a1, JSFunction::kCodeOffset));
475  __ Daddu(a2, a2, Operand(Code::kHeaderSize - kHeapObjectTag));
476  __ Jump(a2);
477  }
478 
479  __ bind(&prepare_step_in_if_stepping);
480  {
481  FrameScope scope(masm, StackFrame::INTERNAL);
482  __ Push(a1, a4);
483  // Push hole as receiver since we do not use it for stepping.
484  __ PushRoot(RootIndex::kTheHoleValue);
485  __ CallRuntime(Runtime::kDebugOnFunctionCall);
486  __ Pop(a1);
487  }
488  __ Branch(USE_DELAY_SLOT, &stepping_prepared);
489  __ Ld(a4, FieldMemOperand(a1, JSGeneratorObject::kFunctionOffset));
490 
491  __ bind(&prepare_step_in_suspended_generator);
492  {
493  FrameScope scope(masm, StackFrame::INTERNAL);
494  __ Push(a1);
495  __ CallRuntime(Runtime::kDebugPrepareStepInSuspendedGenerator);
496  __ Pop(a1);
497  }
498  __ Branch(USE_DELAY_SLOT, &stepping_prepared);
499  __ Ld(a4, FieldMemOperand(a1, JSGeneratorObject::kFunctionOffset));
500 
501  __ bind(&stack_overflow);
502  {
503  FrameScope scope(masm, StackFrame::INTERNAL);
504  __ CallRuntime(Runtime::kThrowStackOverflow);
505  __ break_(0xCC); // This should be unreachable.
506  }
507 }
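Seen from above the assembly, resuming a generator rebuilds a call frame whose argument slots are filled from the generator's saved parameters-and-registers array (values that will never be read again, since generator variables are context-allocated) and then calls the generator function with the generator object passed through new.target. A rough sketch of just the frame preparation, using stand-in types rather than V8's API:

#include <cstdio>
#include <string>
#include <vector>

using Value = std::string;  // stand-in for a tagged value

struct Generator {
  Value receiver;
  std::vector<Value> parameters_and_registers;  // saved at suspension
};

// Push the receiver, then one slot per formal parameter copied from the saved
// array, exactly like the push loop in the trampoline.
std::vector<Value> BuildResumeFrame(const Generator& gen, int formal_parameter_count) {
  std::vector<Value> stack;
  stack.push_back(gen.receiver);
  for (int i = 0; i < formal_parameter_count; ++i)
    stack.push_back(gen.parameters_and_registers[i]);
  return stack;
}

int main() {
  Generator gen{"receiver", {"p0", "p1", "r0", "r1"}};
  printf("%zu slots pushed\n", BuildResumeFrame(gen, 2).size());  // 3
  return 0;
}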
508 
509 void Builtins::Generate_ConstructedNonConstructable(MacroAssembler* masm) {
510  FrameScope scope(masm, StackFrame::INTERNAL);
511  __ Push(a1);
512  __ CallRuntime(Runtime::kThrowConstructedNonConstructable);
513 }
514 
515 // Clobbers a2; preserves all other registers.
516 static void Generate_CheckStackOverflow(MacroAssembler* masm, Register argc) {
517  // Check the stack for overflow. We are not trying to catch
518  // interruptions (e.g. debug break and preemption) here, so the "real stack
519  // limit" is checked.
520  Label okay;
521  __ LoadRoot(a2, RootIndex::kRealStackLimit);
522  // Make a2 the space we have left. The stack might already have
523  // overflowed here, which will cause a2 to become negative.
524  __ dsubu(a2, sp, a2);
525  // Check if the arguments will overflow the stack.
526  __ dsll(a7, argc, kPointerSizeLog2);
527  __ Branch(&okay, gt, a2, Operand(a7)); // Signed comparison.
528 
529  // Out of stack space.
530  __ CallRuntime(Runtime::kThrowStackOverflow);
531 
532  __ bind(&okay);
533 }
534 
535 static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm,
536  bool is_construct) {
537  // ----------- S t a t e -------------
538  // -- a0: new.target
539  // -- a1: function
540  // -- a2: receiver_pointer
541  // -- a3: argc
542  // -- s0: argv
543  // -----------------------------------
544 
545  // Enter an internal frame.
546  {
547  FrameScope scope(masm, StackFrame::INTERNAL);
548 
549  // Setup the context (we need to use the caller context from the isolate).
550  ExternalReference context_address = ExternalReference::Create(
551  IsolateAddressId::kContextAddress, masm->isolate());
552  __ li(cp, context_address);
553  __ Ld(cp, MemOperand(cp));
554 
555  // Push the function and the receiver onto the stack.
556  __ Push(a1, a2);
557 
558  // Check if we have enough stack space to push all arguments.
559  // Clobbers a2.
560  Generate_CheckStackOverflow(masm, a3);
561 
562  // Remember new.target.
563  __ mov(a5, a0);
564 
565  // Copy arguments to the stack in a loop.
566  // a3: argc
567  // s0: argv, i.e. points to first arg
568  Label loop, entry;
569  __ Dlsa(a6, s0, a3, kPointerSizeLog2);
570  __ b(&entry);
571  __ nop(); // Branch delay slot nop.
572  // a6 points past last arg.
573  __ bind(&loop);
574  __ Ld(a4, MemOperand(s0)); // Read next parameter.
575  __ daddiu(s0, s0, kPointerSize);
576  __ Ld(a4, MemOperand(a4)); // Dereference handle.
577  __ push(a4); // Push parameter.
578  __ bind(&entry);
579  __ Branch(&loop, ne, s0, Operand(a6));
580 
581  // Setup new.target and argc.
582  __ mov(a0, a3);
583  __ mov(a3, a5);
584 
585  // Initialize all JavaScript callee-saved registers, since they will be seen
586  // by the garbage collector as part of handlers.
587  __ LoadRoot(a4, RootIndex::kUndefinedValue);
588  __ mov(s1, a4);
589  __ mov(s2, a4);
590  __ mov(s3, a4);
591  __ mov(s4, a4);
592  __ mov(s5, a4);
593  // s6 holds the root address. Do not clobber.
594  // s7 is cp. Do not init.
595 
596  // Invoke the code.
597  Handle<Code> builtin = is_construct
598  ? BUILTIN_CODE(masm->isolate(), Construct)
599  : masm->isolate()->builtins()->Call();
600  __ Call(builtin, RelocInfo::CODE_TARGET);
601 
602  // Leave internal frame.
603  }
604  __ Jump(ra);
605 }
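The argument-copy loop in the entry trampoline is worth spelling out: each argv slot holds a handle, that is, a pointer to the slot containing the actual tagged value, so every entry is loaded once and then dereferenced before being pushed. A minimal sketch of that loop with plain pointers standing in for handles:

#include <cassert>
#include <cstdint>
#include <vector>

using TaggedValue = intptr_t;

// argv[i] points at a slot that holds the actual tagged value (a "handle").
std::vector<TaggedValue> CopyArguments(TaggedValue* const* argv, int argc) {
  std::vector<TaggedValue> stack;
  for (int i = 0; i < argc; ++i) {
    TaggedValue* handle = argv[i];  // Ld(a4, MemOperand(s0)): read next parameter
    stack.push_back(*handle);       // Ld(a4, MemOperand(a4)): dereference handle
  }
  return stack;
}

int main() {
  TaggedValue a = 1, b = 2;
  TaggedValue* argv[] = {&a, &b};
  std::vector<TaggedValue> pushed = CopyArguments(argv, 2);
  assert(pushed[0] == 1 && pushed[1] == 2);
  return 0;
}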
606 
607 void Builtins::Generate_JSEntryTrampoline(MacroAssembler* masm) {
608  Generate_JSEntryTrampolineHelper(masm, false);
609 }
610 
611 void Builtins::Generate_JSConstructEntryTrampoline(MacroAssembler* masm) {
612  Generate_JSEntryTrampolineHelper(masm, true);
613 }
614 
615 static void ReplaceClosureCodeWithOptimizedCode(
616  MacroAssembler* masm, Register optimized_code, Register closure,
617  Register scratch1, Register scratch2, Register scratch3) {
618  // Store code entry in the closure.
619  __ Sd(optimized_code, FieldMemOperand(closure, JSFunction::kCodeOffset));
620  __ mov(scratch1, optimized_code); // Write barrier clobbers scratch1 below.
621  __ RecordWriteField(closure, JSFunction::kCodeOffset, scratch1, scratch2,
622  kRAHasNotBeenSaved, kDontSaveFPRegs, OMIT_REMEMBERED_SET,
623  OMIT_SMI_CHECK);
624 }
625 
626 static void LeaveInterpreterFrame(MacroAssembler* masm, Register scratch) {
627  Register args_count = scratch;
628 
629  // Get the arguments + receiver count.
630  __ Ld(args_count,
631  MemOperand(fp, InterpreterFrameConstants::kBytecodeArrayFromFp));
632  __ Lw(t0, FieldMemOperand(args_count, BytecodeArray::kParameterSizeOffset));
633 
634  // Leave the frame (also dropping the register file).
635  __ LeaveFrame(StackFrame::INTERPRETED);
636 
637  // Drop receiver + arguments.
638  __ Daddu(sp, sp, args_count);
639 }
640 
641 // Tail-call |function_id| if |smi_entry| == |marker|
642 static void TailCallRuntimeIfMarkerEquals(MacroAssembler* masm,
643  Register smi_entry,
644  OptimizationMarker marker,
645  Runtime::FunctionId function_id) {
646  Label no_match;
647  __ Branch(&no_match, ne, smi_entry, Operand(Smi::FromEnum(marker)));
648  GenerateTailCallToReturnedCode(masm, function_id);
649  __ bind(&no_match);
650 }
651 
652 static void MaybeTailCallOptimizedCodeSlot(MacroAssembler* masm,
653  Register feedback_vector,
654  Register scratch1, Register scratch2,
655  Register scratch3) {
656  // ----------- S t a t e -------------
657  // -- a0 : argument count (preserved for callee if needed, and caller)
658  // -- a3 : new target (preserved for callee if needed, and caller)
659  // -- a1 : target function (preserved for callee if needed, and caller)
660  // -- feedback vector (preserved for caller if needed)
661  // -----------------------------------
662  DCHECK(
663  !AreAliased(feedback_vector, a0, a1, a3, scratch1, scratch2, scratch3));
664 
665  Label optimized_code_slot_is_weak_ref, fallthrough;
666 
667  Register closure = a1;
668  Register optimized_code_entry = scratch1;
669 
670  __ Ld(optimized_code_entry,
671  FieldMemOperand(feedback_vector, FeedbackVector::kOptimizedCodeOffset));
672 
673  // Check if the code entry is a Smi. If yes, we interpret it as an
674  // optimization marker. Otherwise, interpret it as a weak reference to a code
675  // object.
676  __ JumpIfNotSmi(optimized_code_entry, &optimized_code_slot_is_weak_ref);
677 
678  {
679  // Optimized code slot is a Smi optimization marker.
680 
681  // Fall through if no optimization trigger.
682  __ Branch(&fallthrough, eq, optimized_code_entry,
683  Operand(Smi::FromEnum(OptimizationMarker::kNone)));
684 
685  TailCallRuntimeIfMarkerEquals(masm, optimized_code_entry,
686  OptimizationMarker::kLogFirstExecution,
687  Runtime::kFunctionFirstExecution);
688  TailCallRuntimeIfMarkerEquals(masm, optimized_code_entry,
689  OptimizationMarker::kCompileOptimized,
690  Runtime::kCompileOptimized_NotConcurrent);
691  TailCallRuntimeIfMarkerEquals(
692  masm, optimized_code_entry,
693  OptimizationMarker::kCompileOptimizedConcurrent,
694  Runtime::kCompileOptimized_Concurrent);
695 
696  {
697  // Otherwise, the marker is InOptimizationQueue, so fall through hoping
698  // that an interrupt will eventually update the slot with optimized code.
699  if (FLAG_debug_code) {
700  __ Assert(
701  eq, AbortReason::kExpectedOptimizationSentinel,
702  optimized_code_entry,
703  Operand(Smi::FromEnum(OptimizationMarker::kInOptimizationQueue)));
704  }
705  __ jmp(&fallthrough);
706  }
707  }
708 
709  {
710  // Optimized code slot is a weak reference.
711  __ bind(&optimized_code_slot_is_weak_ref);
712 
713  __ LoadWeakValue(optimized_code_entry, optimized_code_entry, &fallthrough);
714 
715  // Check if the optimized code is marked for deopt. If it is, call the
716  // runtime to clear it.
717  Label found_deoptimized_code;
718  __ Ld(a5, FieldMemOperand(optimized_code_entry,
719  Code::kCodeDataContainerOffset));
720  __ Lw(a5, FieldMemOperand(a5, CodeDataContainer::kKindSpecificFlagsOffset));
721  __ And(a5, a5, Operand(1 << Code::kMarkedForDeoptimizationBit));
722  __ Branch(&found_deoptimized_code, ne, a5, Operand(zero_reg));
723 
724  // Optimized code is good, get it into the closure and link the closure into
725  // the optimized functions list, then tail call the optimized code.
726  // The feedback vector is no longer used, so re-use it as a scratch
727  // register.
728  ReplaceClosureCodeWithOptimizedCode(masm, optimized_code_entry, closure,
729  scratch2, scratch3, feedback_vector);
730 
731  static_assert(kJavaScriptCallCodeStartRegister == a2, "ABI mismatch");
732  __ Daddu(a2, optimized_code_entry,
733  Operand(Code::kHeaderSize - kHeapObjectTag));
734  __ Jump(a2);
735 
736  // Optimized code slot contains deoptimized code, evict it and re-enter the
737  // closure's code.
738  __ bind(&found_deoptimized_code);
739  GenerateTailCallToReturnedCode(masm, Runtime::kEvictOptimizedCodeSlot);
740  }
741 
742  // Fall-through if the optimized code cell is clear and there is no
743  // optimization marker.
744  __ bind(&fallthrough);
745 }
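The control flow above is easier to read as a decision tree: a Smi in the optimized-code slot is an optimization marker that may redirect to the runtime, while a weak reference is candidate optimized code that is used only if it has not been marked for deoptimization. A compact sketch of that logic (the enum values and fields are illustrative stand-ins, not V8's types):

#include <cstdio>

// Stand-ins for the tagged slot contents.
enum class Marker { kNone, kLogFirstExecution, kCompileOptimized,
                    kCompileOptimizedConcurrent, kInOptimizationQueue };
struct CodeObject { bool marked_for_deoptimization; };

enum class Action { kRunUnoptimized, kCallRuntime, kTailCallOptimizedCode,
                    kEvictAndReenterClosure };

Action DecideFromOptimizedCodeSlot(bool slot_is_smi, Marker marker,
                                   const CodeObject* weak_code_or_null) {
  if (slot_is_smi) {                               // slot holds a marker
    if (marker == Marker::kNone ||
        marker == Marker::kInOptimizationQueue)    // fall through / keep waiting
      return Action::kRunUnoptimized;
    return Action::kCallRuntime;                   // log or compile (concurrently)
  }
  if (weak_code_or_null == nullptr)                // weak reference was cleared
    return Action::kRunUnoptimized;
  if (weak_code_or_null->marked_for_deoptimization)
    return Action::kEvictAndReenterClosure;        // found_deoptimized_code
  return Action::kTailCallOptimizedCode;           // install in closure and jump
}

int main() {
  CodeObject good{false}, stale{true};
  printf("%d %d %d\n",
         (int)DecideFromOptimizedCodeSlot(true, Marker::kNone, nullptr),
         (int)DecideFromOptimizedCodeSlot(false, Marker::kNone, &good),
         (int)DecideFromOptimizedCodeSlot(false, Marker::kNone, &stale));
  return 0;
}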
746 
747 // Advance the current bytecode offset. This simulates what all bytecode
748 // handlers do upon completion of the underlying operation. Will bail out to a
749 // label if the bytecode (without prefix) is a return bytecode.
750 static void AdvanceBytecodeOffsetOrReturn(MacroAssembler* masm,
751  Register bytecode_array,
752  Register bytecode_offset,
753  Register bytecode, Register scratch1,
754  Register scratch2, Label* if_return) {
755  Register bytecode_size_table = scratch1;
756  DCHECK(!AreAliased(bytecode_array, bytecode_offset, bytecode_size_table,
757  bytecode));
758  __ li(bytecode_size_table, ExternalReference::bytecode_size_table_address());
759 
760  // Check if the bytecode is a Wide or ExtraWide prefix bytecode.
761  Label process_bytecode, extra_wide;
762  STATIC_ASSERT(0 == static_cast<int>(interpreter::Bytecode::kWide));
763  STATIC_ASSERT(1 == static_cast<int>(interpreter::Bytecode::kExtraWide));
764  STATIC_ASSERT(2 == static_cast<int>(interpreter::Bytecode::kDebugBreakWide));
765  STATIC_ASSERT(3 ==
766  static_cast<int>(interpreter::Bytecode::kDebugBreakExtraWide));
767  __ Branch(&process_bytecode, hi, bytecode, Operand(3));
768  __ And(scratch2, bytecode, Operand(1));
769  __ Branch(&extra_wide, ne, scratch2, Operand(zero_reg));
770 
771  // Load the next bytecode and update table to the wide scaled table.
772  __ Daddu(bytecode_offset, bytecode_offset, Operand(1));
773  __ Daddu(scratch2, bytecode_array, bytecode_offset);
774  __ Lbu(bytecode, MemOperand(scratch2));
775  __ Daddu(bytecode_size_table, bytecode_size_table,
776  Operand(kIntSize * interpreter::Bytecodes::kBytecodeCount));
777  __ jmp(&process_bytecode);
778 
779  __ bind(&extra_wide);
780  // Load the next bytecode and update table to the extra wide scaled table.
781  __ Daddu(bytecode_offset, bytecode_offset, Operand(1));
782  __ Daddu(scratch2, bytecode_array, bytecode_offset);
783  __ Lbu(bytecode, MemOperand(scratch2));
784  __ Daddu(bytecode_size_table, bytecode_size_table,
785  Operand(2 * kIntSize * interpreter::Bytecodes::kBytecodeCount));
786 
787  __ bind(&process_bytecode);
788 
789 // Bailout to the return label if this is a return bytecode.
790 #define JUMP_IF_EQUAL(NAME) \
791  __ Branch(if_return, eq, bytecode, \
792  Operand(static_cast<int>(interpreter::Bytecode::k##NAME)));
793  RETURN_BYTECODE_LIST(JUMP_IF_EQUAL)
794 #undef JUMP_IF_EQUAL
795 
796  // Otherwise, load the size of the current bytecode and advance the offset.
797  __ Dlsa(scratch2, bytecode_size_table, bytecode, 2);
798  __ Lw(scratch2, MemOperand(scratch2));
799  __ Daddu(bytecode_offset, bytecode_offset, scratch2);
800 }
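The helper has three cases: a Wide prefix selects the next block of the size table, an ExtraWide prefix selects the block after that, and everything else uses the base table; return bytecodes bail out instead of advancing. A self-contained sketch of the same arithmetic, built around an invented four-bytecode set and size table rather than the real ones:

#include <cstdint>
#include <cstdio>

// Illustrative stand-ins: two prefixes, one return bytecode, one ordinary
// bytecode, and a per-scale size table (single / wide / extra-wide).
enum : uint8_t { kWide = 0, kExtraWide = 1, kReturn = 2, kAddSmi = 3, kCount = 4 };
static const int kSizes[3][kCount] = {
    {1, 1, 1, 2},   // single
    {1, 1, 1, 3},   // wide
    {1, 1, 1, 5},   // extra-wide
};

// Returns the next offset, or -1 when the (unprefixed) bytecode is a return.
int AdvanceOffsetOrReturn(const uint8_t* array, int offset) {
  int scale = 0;
  uint8_t bytecode = array[offset];
  if (bytecode == kWide)           { scale = 1; bytecode = array[++offset]; }
  else if (bytecode == kExtraWide) { scale = 2; bytecode = array[++offset]; }
  if (bytecode == kReturn) return -1;         // bail out to the if_return label
  return offset + kSizes[scale][bytecode];    // size lookup in the scaled table
}

int main() {
  const uint8_t code[] = {kAddSmi, 0, kWide, kAddSmi, 0, 0, kReturn};
  int offset = 0;
  while ((offset = AdvanceOffsetOrReturn(code, offset)) != -1)
    printf("offset=%d\n", offset);            // 2, then 6, then the return bails out
  return 0;
}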
801 
802 // Generate code for entering a JS function with the interpreter.
803 // On entry to the function the receiver and arguments have been pushed on the
804 // stack left to right. The actual argument count matches the formal parameter
805 // count expected by the function.
806 //
807 // The live registers are:
808 // o a1: the JS function object being called.
809 // o a3: the incoming new target or generator object
810 // o cp: our context
811 // o fp: the caller's frame pointer
812 // o sp: stack pointer
813 // o ra: return address
814 //
815 // The function builds an interpreter frame. See InterpreterFrameConstants in
816 // frames.h for its layout.
817 void Builtins::Generate_InterpreterEntryTrampoline(MacroAssembler* masm) {
818  Register closure = a1;
819  Register feedback_vector = a2;
820 
821  // Load the feedback vector from the closure.
822  __ Ld(feedback_vector,
823  FieldMemOperand(closure, JSFunction::kFeedbackCellOffset));
824  __ Ld(feedback_vector, FieldMemOperand(feedback_vector, Cell::kValueOffset));
825  // Read off the optimized code slot in the feedback vector, and if there
826  // is optimized code or an optimization marker, call that instead.
827  MaybeTailCallOptimizedCodeSlot(masm, feedback_vector, a4, t3, a5);
828 
829  // Open a frame scope to indicate that there is a frame on the stack. The
830  // MANUAL indicates that the scope shouldn't actually generate code to set up
831  // the frame (that is done below).
832  FrameScope frame_scope(masm, StackFrame::MANUAL);
833  __ PushStandardFrame(closure);
834 
835  // Get the bytecode array from the function object and load it into
836  // kInterpreterBytecodeArrayRegister.
837  __ Ld(a0, FieldMemOperand(closure, JSFunction::kSharedFunctionInfoOffset));
838  __ Ld(kInterpreterBytecodeArrayRegister,
839  FieldMemOperand(a0, SharedFunctionInfo::kFunctionDataOffset));
840  GetSharedFunctionInfoBytecode(masm, kInterpreterBytecodeArrayRegister, a4);
841 
842  // Increment invocation count for the function.
843  __ Lw(a4, FieldMemOperand(feedback_vector,
844  FeedbackVector::kInvocationCountOffset));
845  __ Addu(a4, a4, Operand(1));
846  __ Sw(a4, FieldMemOperand(feedback_vector,
847  FeedbackVector::kInvocationCountOffset));
848 
849  // Check function data field is actually a BytecodeArray object.
850  if (FLAG_debug_code) {
851  __ SmiTst(kInterpreterBytecodeArrayRegister, a4);
852  __ Assert(ne,
853  AbortReason::kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry,
854  a4, Operand(zero_reg));
855  __ GetObjectType(kInterpreterBytecodeArrayRegister, a4, a4);
856  __ Assert(eq,
857  AbortReason::kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry,
858  a4, Operand(BYTECODE_ARRAY_TYPE));
859  }
860 
861  // Reset code age.
862  DCHECK_EQ(0, BytecodeArray::kNoAgeBytecodeAge);
863  __ sb(zero_reg, FieldMemOperand(kInterpreterBytecodeArrayRegister,
864  BytecodeArray::kBytecodeAgeOffset));
865 
866  // Load initial bytecode offset.
867  __ li(kInterpreterBytecodeOffsetRegister,
868  Operand(BytecodeArray::kHeaderSize - kHeapObjectTag));
869 
870  // Push bytecode array and Smi tagged bytecode array offset.
871  __ SmiTag(a4, kInterpreterBytecodeOffsetRegister);
872  __ Push(kInterpreterBytecodeArrayRegister, a4);
873 
874  // Allocate the local and temporary register file on the stack.
875  {
876  // Load frame size (word) from the BytecodeArray object.
877  __ Lw(a4, FieldMemOperand(kInterpreterBytecodeArrayRegister,
878  BytecodeArray::kFrameSizeOffset));
879 
880  // Do a stack check to ensure we don't go over the limit.
881  Label ok;
882  __ Dsubu(a5, sp, Operand(a4));
883  __ LoadRoot(a2, RootIndex::kRealStackLimit);
884  __ Branch(&ok, hs, a5, Operand(a2));
885  __ CallRuntime(Runtime::kThrowStackOverflow);
886  __ bind(&ok);
887 
888  // If ok, push undefined as the initial value for all register file entries.
889  Label loop_header;
890  Label loop_check;
891  __ LoadRoot(a5, RootIndex::kUndefinedValue);
892  __ Branch(&loop_check);
893  __ bind(&loop_header);
894  // TODO(rmcilroy): Consider doing more than one push per loop iteration.
895  __ push(a5);
896  // Continue loop if not done.
897  __ bind(&loop_check);
898  __ Dsubu(a4, a4, Operand(kPointerSize));
899  __ Branch(&loop_header, ge, a4, Operand(zero_reg));
900  }
901 
902  // If the bytecode array has a valid incoming new target or generator object
903  // register, initialize it with the incoming value, which was passed in a3.
904  Label no_incoming_new_target_or_generator_register;
905  __ Lw(a5, FieldMemOperand(
906  kInterpreterBytecodeArrayRegister,
907  BytecodeArray::kIncomingNewTargetOrGeneratorRegisterOffset));
908  __ Branch(&no_incoming_new_target_or_generator_register, eq, a5,
909  Operand(zero_reg));
910  __ Dlsa(a5, fp, a5, kPointerSizeLog2);
911  __ Sd(a3, MemOperand(a5));
912  __ bind(&no_incoming_new_target_or_generator_register);
913 
914  // Load accumulator as undefined.
915  __ LoadRoot(kInterpreterAccumulatorRegister, RootIndex::kUndefinedValue);
916 
917  // Load the dispatch table into a register and dispatch to the bytecode
918  // handler at the current bytecode offset.
919  Label do_dispatch;
920  __ bind(&do_dispatch);
921  __ li(kInterpreterDispatchTableRegister,
922  ExternalReference::interpreter_dispatch_table_address(masm->isolate()));
923  __ Daddu(a0, kInterpreterBytecodeArrayRegister,
924  kInterpreterBytecodeOffsetRegister);
925  __ Lbu(a7, MemOperand(a0));
926  __ Dlsa(kScratchReg, kInterpreterDispatchTableRegister, a7, kPointerSizeLog2);
927  __ Ld(kJavaScriptCallCodeStartRegister, MemOperand(kScratchReg));
928  __ Call(kJavaScriptCallCodeStartRegister);
929  masm->isolate()->heap()->SetInterpreterEntryReturnPCOffset(masm->pc_offset());
930 
931  // Any returns to the entry trampoline are either due to the return bytecode
932  // or the interpreter tail calling a builtin and then a dispatch.
933 
934  // Get bytecode array and bytecode offset from the stack frame.
935  __ Ld(kInterpreterBytecodeArrayRegister,
936  MemOperand(fp, InterpreterFrameConstants::kBytecodeArrayFromFp));
937  __ Ld(kInterpreterBytecodeOffsetRegister,
938  MemOperand(fp, InterpreterFrameConstants::kBytecodeOffsetFromFp));
939  __ SmiUntag(kInterpreterBytecodeOffsetRegister);
940 
941  // Either return, or advance to the next bytecode and dispatch.
942  Label do_return;
943  __ Daddu(a1, kInterpreterBytecodeArrayRegister,
944  kInterpreterBytecodeOffsetRegister);
945  __ Lbu(a1, MemOperand(a1));
946  AdvanceBytecodeOffsetOrReturn(masm, kInterpreterBytecodeArrayRegister,
947  kInterpreterBytecodeOffsetRegister, a1, a2, a3,
948  &do_return);
949  __ jmp(&do_dispatch);
950 
951  __ bind(&do_return);
952  // The return value is in v0.
953  LeaveInterpreterFrame(masm, t0);
954  __ Jump(ra);
955 }
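Stripped of frame setup and feedback handling, the core of the trampoline is a table lookup indexed by the current bytecode followed by an indirect call, repeated until a return bytecode is hit. A toy dispatch loop showing that shape, with ordinary function pointers standing in for V8 code objects:

#include <cstdint>
#include <cstdio>

enum : uint8_t { kLdaSmi = 0, kReturn = 1, kNumBytecodes = 2 };

struct State { int64_t accumulator = 0; bool done = false; };
using Handler = int (*)(const uint8_t*, int, State&);  // returns the next offset

int HandleLdaSmi(const uint8_t* code, int offset, State& s) {
  s.accumulator = static_cast<int8_t>(code[offset + 1]);
  return offset + 2;
}
int HandleReturn(const uint8_t*, int offset, State& s) { s.done = true; return offset; }

int main() {
  // Analog of kInterpreterDispatchTableRegister: handlers indexed by bytecode.
  const Handler dispatch_table[kNumBytecodes] = {HandleLdaSmi, HandleReturn};
  const uint8_t bytecode_array[] = {kLdaSmi, 42, kReturn};

  State state;
  int offset = 0;  // plays the role of kInterpreterBytecodeOffsetRegister
  while (!state.done)
    offset = dispatch_table[bytecode_array[offset]](bytecode_array, offset, state);
  printf("accumulator = %lld\n", static_cast<long long>(state.accumulator));  // 42
  return 0;
}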
956 
957 static void Generate_InterpreterPushArgs(MacroAssembler* masm,
958  Register num_args, Register index,
959  Register scratch, Register scratch2) {
960  // Find the address of the last argument.
961  __ mov(scratch2, num_args);
962  __ dsll(scratch2, scratch2, kPointerSizeLog2);
963  __ Dsubu(scratch2, index, Operand(scratch2));
964 
965  // Push the arguments.
966  Label loop_header, loop_check;
967  __ Branch(&loop_check);
968  __ bind(&loop_header);
969  __ Ld(scratch, MemOperand(index));
970  __ Daddu(index, index, Operand(-kPointerSize));
971  __ push(scratch);
972  __ bind(&loop_check);
973  __ Branch(&loop_header, gt, index, Operand(scratch2));
974 }
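The push loop starts at the address of the first argument and walks downward one pointer-sized slot per argument, which is why the end marker is computed as index minus num_args * kPointerSize. The same copy in index form (assuming, as the builtin does, that arguments sit at decreasing addresses):

#include <cassert>
#include <cstdint>
#include <vector>

using TaggedValue = intptr_t;

// first_arg points at the highest-addressed argument; each later argument
// sits one pointer-sized slot lower.
void PushArgs(const TaggedValue* first_arg, int num_args,
              std::vector<TaggedValue>& stack) {
  for (int i = 0; i < num_args; ++i)
    stack.push_back(*(first_arg - i));  // push(scratch); index -= kPointerSize
}

int main() {
  TaggedValue slots[] = {30, 20, 10};  // the first argument is the value 10
  std::vector<TaggedValue> stack;
  PushArgs(&slots[2], 3, stack);
  assert(stack[0] == 10 && stack[1] == 20 && stack[2] == 30);
  return 0;
}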
975 
976 // static
977 void Builtins::Generate_InterpreterPushArgsThenCallImpl(
978  MacroAssembler* masm, ConvertReceiverMode receiver_mode,
979  InterpreterPushArgsMode mode) {
980  DCHECK(mode != InterpreterPushArgsMode::kArrayFunction);
981  // ----------- S t a t e -------------
982  // -- a0 : the number of arguments (not including the receiver)
983  // -- a2 : the address of the first argument to be pushed. Subsequent
984  // arguments should be consecutive above this, in the same order as
985  // they are to be pushed onto the stack.
986  // -- a1 : the target to call (can be any Object).
987  // -----------------------------------
988  Label stack_overflow;
989 
990  __ Daddu(a3, a0, Operand(1)); // Add one for receiver.
991 
992  // Push "undefined" as the receiver arg if we need to.
993  if (receiver_mode == ConvertReceiverMode::kNullOrUndefined) {
994  __ PushRoot(RootIndex::kUndefinedValue);
995  __ Dsubu(a3, a3, Operand(1)); // Subtract one for receiver.
996  }
997 
998  Generate_StackOverflowCheck(masm, a3, a4, t0, &stack_overflow);
999 
1000  // This function modifies a2, t0 and a4.
1001  Generate_InterpreterPushArgs(masm, a3, a2, a4, t0);
1002 
1003  if (mode == InterpreterPushArgsMode::kWithFinalSpread) {
1004  __ Pop(a2); // Pass the spread in a register
1005  __ Dsubu(a0, a0, Operand(1)); // Subtract one for spread
1006  }
1007 
1008  // Call the target.
1009  if (mode == InterpreterPushArgsMode::kWithFinalSpread) {
1010  __ Jump(BUILTIN_CODE(masm->isolate(), CallWithSpread),
1011  RelocInfo::CODE_TARGET);
1012  } else {
1013  __ Jump(masm->isolate()->builtins()->Call(ConvertReceiverMode::kAny),
1014  RelocInfo::CODE_TARGET);
1015  }
1016 
1017  __ bind(&stack_overflow);
1018  {
1019  __ TailCallRuntime(Runtime::kThrowStackOverflow);
1020  // Unreachable code.
1021  __ break_(0xCC);
1022  }
1023 }
1024 
1025 // static
1026 void Builtins::Generate_InterpreterPushArgsThenConstructImpl(
1027  MacroAssembler* masm, InterpreterPushArgsMode mode) {
1028  // ----------- S t a t e -------------
1029  // -- a0 : argument count (not including receiver)
1030  // -- a3 : new target
1031  // -- a1 : constructor to call
1032  // -- a2 : allocation site feedback if available, undefined otherwise.
1033  // -- a4 : address of the first argument
1034  // -----------------------------------
1035  Label stack_overflow;
1036 
1037  // Push a slot for the receiver.
1038  __ push(zero_reg);
1039 
1040  Generate_StackOverflowCheck(masm, a0, a5, t0, &stack_overflow);
1041 
1042  // This function modifies t0, a4 and a5.
1043  Generate_InterpreterPushArgs(masm, a0, a4, a5, t0);
1044 
1045  if (mode == InterpreterPushArgsMode::kWithFinalSpread) {
1046  __ Pop(a2); // Pass the spread in a register
1047  __ Dsubu(a0, a0, Operand(1)); // Subtract one for spread
1048  } else {
1049  __ AssertUndefinedOrAllocationSite(a2, t0);
1050  }
1051 
1052  if (mode == InterpreterPushArgsMode::kArrayFunction) {
1053  __ AssertFunction(a1);
1054 
1055  // Tail call to the function-specific construct stub (still in the caller
1056  // context at this point).
1057  __ Jump(BUILTIN_CODE(masm->isolate(), ArrayConstructorImpl),
1058  RelocInfo::CODE_TARGET);
1059  } else if (mode == InterpreterPushArgsMode::kWithFinalSpread) {
1060  // Call the constructor with a0, a1, and a3 unmodified.
1061  __ Jump(BUILTIN_CODE(masm->isolate(), ConstructWithSpread),
1062  RelocInfo::CODE_TARGET);
1063  } else {
1064  DCHECK_EQ(InterpreterPushArgsMode::kOther, mode);
1065  // Call the constructor with a0, a1, and a3 unmodified.
1066  __ Jump(BUILTIN_CODE(masm->isolate(), Construct), RelocInfo::CODE_TARGET);
1067  }
1068 
1069  __ bind(&stack_overflow);
1070  {
1071  __ TailCallRuntime(Runtime::kThrowStackOverflow);
1072  // Unreachable code.
1073  __ break_(0xCC);
1074  }
1075 }
1076 
1077 static void Generate_InterpreterEnterBytecode(MacroAssembler* masm) {
1078  // Set the return address to the correct point in the interpreter entry
1079  // trampoline.
1080  Label builtin_trampoline, trampoline_loaded;
1081  Smi interpreter_entry_return_pc_offset(
1082  masm->isolate()->heap()->interpreter_entry_return_pc_offset());
1083  DCHECK_NE(interpreter_entry_return_pc_offset, Smi::zero());
1084 
1085  // If the SFI function_data is an InterpreterData, the function will have a
1086  // custom copy of the interpreter entry trampoline for profiling. If so,
1087  // get the custom trampoline, otherwise grab the entry address of the global
1088  // trampoline.
1089  __ Ld(t0, MemOperand(fp, StandardFrameConstants::kFunctionOffset));
1090  __ Ld(t0, FieldMemOperand(t0, JSFunction::kSharedFunctionInfoOffset));
1091  __ Ld(t0, FieldMemOperand(t0, SharedFunctionInfo::kFunctionDataOffset));
1092  __ GetObjectType(t0, kInterpreterDispatchTableRegister,
1093  kInterpreterDispatchTableRegister);
1094  __ Branch(&builtin_trampoline, ne, kInterpreterDispatchTableRegister,
1095  Operand(INTERPRETER_DATA_TYPE));
1096 
1097  __ Ld(t0, FieldMemOperand(t0, InterpreterData::kInterpreterTrampolineOffset));
1098  __ Daddu(t0, t0, Operand(Code::kHeaderSize - kHeapObjectTag));
1099  __ Branch(&trampoline_loaded);
1100 
1101  __ bind(&builtin_trampoline);
1102  __ li(t0, ExternalReference::
1103  address_of_interpreter_entry_trampoline_instruction_start(
1104  masm->isolate()));
1105  __ Ld(t0, MemOperand(t0));
1106 
1107  __ bind(&trampoline_loaded);
1108  __ Daddu(ra, t0, Operand(interpreter_entry_return_pc_offset->value()));
1109 
1110  // Initialize the dispatch table register.
1111  __ li(kInterpreterDispatchTableRegister,
1112  ExternalReference::interpreter_dispatch_table_address(masm->isolate()));
1113 
1114  // Get the bytecode array pointer from the frame.
1115  __ Ld(kInterpreterBytecodeArrayRegister,
1116  MemOperand(fp, InterpreterFrameConstants::kBytecodeArrayFromFp));
1117 
1118  if (FLAG_debug_code) {
1119  // Check function data field is actually a BytecodeArray object.
1120  __ SmiTst(kInterpreterBytecodeArrayRegister, kScratchReg);
1121  __ Assert(ne,
1122  AbortReason::kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry,
1123  kScratchReg, Operand(zero_reg));
1124  __ GetObjectType(kInterpreterBytecodeArrayRegister, a1, a1);
1125  __ Assert(eq,
1126  AbortReason::kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry,
1127  a1, Operand(BYTECODE_ARRAY_TYPE));
1128  }
1129 
1130  // Get the target bytecode offset from the frame.
1131  __ SmiUntag(kInterpreterBytecodeOffsetRegister,
1132  MemOperand(fp, InterpreterFrameConstants::kBytecodeOffsetFromFp));
1133 
1134  // Dispatch to the target bytecode.
1135  __ Daddu(a1, kInterpreterBytecodeArrayRegister,
1136  kInterpreterBytecodeOffsetRegister);
1137  __ Lbu(a7, MemOperand(a1));
1138  __ Dlsa(a1, kInterpreterDispatchTableRegister, a7, kPointerSizeLog2);
1139  __ Ld(kJavaScriptCallCodeStartRegister, MemOperand(a1));
1140  __ Jump(kJavaScriptCallCodeStartRegister);
1141 }
1142 
1143 void Builtins::Generate_InterpreterEnterBytecodeAdvance(MacroAssembler* masm) {
1144  // Advance the current bytecode offset stored within the given interpreter
1145  // stack frame. This simulates what all bytecode handlers do upon completion
1146  // of the underlying operation.
1147  __ Ld(kInterpreterBytecodeArrayRegister,
1148  MemOperand(fp, InterpreterFrameConstants::kBytecodeArrayFromFp));
1149  __ Ld(kInterpreterBytecodeOffsetRegister,
1150  MemOperand(fp, InterpreterFrameConstants::kBytecodeOffsetFromFp));
1151  __ SmiUntag(kInterpreterBytecodeOffsetRegister);
1152 
1153  // Load the current bytecode.
1154  __ Daddu(a1, kInterpreterBytecodeArrayRegister,
1155  kInterpreterBytecodeOffsetRegister);
1156  __ Lbu(a1, MemOperand(a1));
1157 
1158  // Advance to the next bytecode.
1159  Label if_return;
1160  AdvanceBytecodeOffsetOrReturn(masm, kInterpreterBytecodeArrayRegister,
1161  kInterpreterBytecodeOffsetRegister, a1, a2, a3,
1162  &if_return);
1163 
1164  // Convert new bytecode offset to a Smi and save in the stackframe.
1165  __ SmiTag(a2, kInterpreterBytecodeOffsetRegister);
1166  __ Sd(a2, MemOperand(fp, InterpreterFrameConstants::kBytecodeOffsetFromFp));
1167 
1168  Generate_InterpreterEnterBytecode(masm);
1169 
1170  // We should never take the if_return path.
1171  __ bind(&if_return);
1172  __ Abort(AbortReason::kInvalidBytecodeAdvance);
1173 }
1174 
1175 void Builtins::Generate_InterpreterEnterBytecodeDispatch(MacroAssembler* masm) {
1176  Generate_InterpreterEnterBytecode(masm);
1177 }
1178 
1179 void Builtins::Generate_InstantiateAsmJs(MacroAssembler* masm) {
1180  // ----------- S t a t e -------------
1181  // -- a0 : argument count (preserved for callee)
1182  // -- a1 : new target (preserved for callee)
1183  // -- a3 : target function (preserved for callee)
1184  // -----------------------------------
1185  Label failed;
1186  {
1187  FrameScope scope(masm, StackFrame::INTERNAL);
1188  // Push a copy of the target function and the new target.
1189  // Push function as parameter to the runtime call.
1190  __ Move(t2, a0);
1191  __ SmiTag(a0);
1192  __ Push(a0, a1, a3, a1);
1193 
1194  // Copy arguments from caller (stdlib, foreign, heap).
1195  Label args_done;
1196  for (int j = 0; j < 4; ++j) {
1197  Label over;
1198  if (j < 3) {
1199  __ Branch(&over, ne, t2, Operand(j));
1200  }
1201  for (int i = j - 1; i >= 0; --i) {
1202  __ Ld(t2, MemOperand(fp, StandardFrameConstants::kCallerSPOffset +
1203  i * kPointerSize));
1204  __ push(t2);
1205  }
1206  for (int i = 0; i < 3 - j; ++i) {
1207  __ PushRoot(RootIndex::kUndefinedValue);
1208  }
1209  if (j < 3) {
1210  __ jmp(&args_done);
1211  __ bind(&over);
1212  }
1213  }
1214  __ bind(&args_done);
1215 
1216  // Call the runtime; on success, unwind this frame and the parent frame.
1217  __ CallRuntime(Runtime::kInstantiateAsmJs, 4);
1218  // A smi 0 is returned on failure, an object on success.
1219  __ JumpIfSmi(v0, &failed);
1220 
1221  __ Drop(2);
1222  __ pop(t2);
1223  __ SmiUntag(t2);
1224  scope.GenerateLeaveFrame();
1225 
1226  __ Daddu(t2, t2, Operand(1));
1227  __ Dlsa(sp, sp, t2, kPointerSizeLog2);
1228  __ Ret();
1229 
1230  __ bind(&failed);
1231  // Restore target function and new target.
1232  __ Pop(a0, a1, a3);
1233  __ SmiUntag(a0);
1234  }
1235  // On failure, tail call back to regular JavaScript by re-calling the function,
1236  // which has been reset to the CompileLazy builtin.
1237  static_assert(kJavaScriptCallCodeStartRegister == a2, "ABI mismatch");
1238  __ Ld(a2, FieldMemOperand(a1, JSFunction::kCodeOffset));
1239  __ Daddu(a2, a2, Operand(Code::kHeaderSize - kHeapObjectTag));
1240  __ Jump(a2);
1241 }
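The nested loops in the frame body just gather up to three optional caller arguments (stdlib, foreign, heap) and pad the rest with undefined so Runtime::kInstantiateAsmJs always receives three values. A straight-line sketch of that selection (illustrative only; the real code works directly on the caller's stack frame):

#include <cstdio>
#include <string>
#include <vector>

using Value = std::string;  // stand-in for a tagged value

// Keep at most the first three caller arguments and pad with "undefined".
std::vector<Value> CollectAsmJsArgs(const std::vector<Value>& caller_args) {
  std::vector<Value> out;
  const size_t take = caller_args.size() < 3 ? caller_args.size() : 3;
  for (size_t i = 0; i < take; ++i) out.push_back(caller_args[i]);
  while (out.size() < 3) out.push_back("undefined");
  return out;
}

int main() {
  for (const Value& v : CollectAsmJsArgs({"stdlib"}))
    printf("%s\n", v.c_str());  // stdlib, undefined, undefined
  return 0;
}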
1242 
1243 namespace {
1244 void Generate_ContinueToBuiltinHelper(MacroAssembler* masm,
1245  bool java_script_builtin,
1246  bool with_result) {
1247  const RegisterConfiguration* config(RegisterConfiguration::Default());
1248  int allocatable_register_count = config->num_allocatable_general_registers();
1249  if (with_result) {
1250  // Overwrite the hole inserted by the deoptimizer with the return value from
1251  // the LAZY deopt point.
1252  __ Sd(v0,
1253  MemOperand(
1254  sp, config->num_allocatable_general_registers() * kPointerSize +
1255  BuiltinContinuationFrameConstants::kFixedFrameSize));
1256  }
1257  for (int i = allocatable_register_count - 1; i >= 0; --i) {
1258  int code = config->GetAllocatableGeneralCode(i);
1259  __ Pop(Register::from_code(code));
1260  if (java_script_builtin && code == kJavaScriptCallArgCountRegister.code()) {
1261  __ SmiUntag(Register::from_code(code));
1262  }
1263  }
1264  __ Ld(fp, MemOperand(
1265  sp, BuiltinContinuationFrameConstants::kFixedFrameSizeFromFp));
1266  __ Pop(t0);
1267  __ Daddu(sp, sp,
1268  Operand(BuiltinContinuationFrameConstants::kFixedFrameSizeFromFp));
1269  __ Pop(ra);
1270  __ Daddu(t0, t0, Operand(Code::kHeaderSize - kHeapObjectTag));
1271  __ Jump(t0);
1272 }
1273 } // namespace
1274 
1275 void Builtins::Generate_ContinueToCodeStubBuiltin(MacroAssembler* masm) {
1276  Generate_ContinueToBuiltinHelper(masm, false, false);
1277 }
1278 
1279 void Builtins::Generate_ContinueToCodeStubBuiltinWithResult(
1280  MacroAssembler* masm) {
1281  Generate_ContinueToBuiltinHelper(masm, false, true);
1282 }
1283 
1284 void Builtins::Generate_ContinueToJavaScriptBuiltin(MacroAssembler* masm) {
1285  Generate_ContinueToBuiltinHelper(masm, true, false);
1286 }
1287 
1288 void Builtins::Generate_ContinueToJavaScriptBuiltinWithResult(
1289  MacroAssembler* masm) {
1290  Generate_ContinueToBuiltinHelper(masm, true, true);
1291 }
1292 
1293 void Builtins::Generate_NotifyDeoptimized(MacroAssembler* masm) {
1294  {
1295  FrameScope scope(masm, StackFrame::INTERNAL);
1296  __ CallRuntime(Runtime::kNotifyDeoptimized);
1297  }
1298 
1299  DCHECK_EQ(kInterpreterAccumulatorRegister.code(), v0.code());
1300  __ Ld(v0, MemOperand(sp, 0 * kPointerSize));
1301  __ Ret(USE_DELAY_SLOT);
1302  // Safe to fill the delay slot: Daddu will emit one instruction.
1303  __ Daddu(sp, sp, Operand(1 * kPointerSize)); // Remove state.
1304 }
1305 
1306 void Builtins::Generate_InterpreterOnStackReplacement(MacroAssembler* masm) {
1307  // Lookup the function in the JavaScript frame.
1308  __ Ld(a0, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
1309  __ Ld(a0, MemOperand(a0, JavaScriptFrameConstants::kFunctionOffset));
1310 
1311  {
1312  FrameScope scope(masm, StackFrame::INTERNAL);
1313  // Pass function as argument.
1314  __ push(a0);
1315  __ CallRuntime(Runtime::kCompileForOnStackReplacement);
1316  }
1317 
1318  // If the code object is null, just return to the caller.
1319  __ Ret(eq, v0, Operand(Smi::zero()));
1320 
1321  // Drop the handler frame that may be sitting on top of the actual
1322  // JavaScript frame. This is the case when OSR is triggered from bytecode.
1323  __ LeaveFrame(StackFrame::STUB);
1324 
1325  // Load deoptimization data from the code object.
1326  // <deopt_data> = <code>[#deoptimization_data_offset]
1327  __ Ld(a1, MemOperand(v0, Code::kDeoptimizationDataOffset - kHeapObjectTag));
1328 
1329  // Load the OSR entrypoint offset from the deoptimization data.
1330  // <osr_offset> = <deopt_data>[#header_size + #osr_pc_offset]
1331  __ SmiUntag(a1, MemOperand(a1, FixedArray::OffsetOfElementAt(
1332  DeoptimizationData::kOsrPcOffsetIndex) -
1333  kHeapObjectTag));
1334 
1335  // Compute the target address = code_obj + header_size + osr_offset
1336  // <entry_addr> = <code_obj> + #header_size + <osr_offset>
1337  __ Daddu(v0, v0, a1);
1338  __ daddiu(ra, v0, Code::kHeaderSize - kHeapObjectTag);
1339 
1340  // And "return" to the OSR entry point of the function.
1341  __ Ret();
1342 }
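The last few instructions compute the OSR entry point with plain pointer arithmetic: the (tagged) code object address, plus the fixed code header size minus the heap-object tag, plus the per-function osr_pc_offset read from the deoptimization data. A tiny worked example of that sum with made-up numbers (the header size here is a placeholder, not the real Code::kHeaderSize):

#include <cstdint>
#include <cstdio>

int main() {
  const uintptr_t kHeapObjectTag = 1;    // tagged heap pointers have the low bit set
  const uintptr_t kCodeHeaderSize = 64;  // placeholder value

  uintptr_t tagged_code_object = 0x2000 + kHeapObjectTag;  // value in v0
  uintptr_t osr_pc_offset = 0x130;                          // from DeoptimizationData

  // <entry_addr> = <code_obj> + #header_size + <osr_offset>; subtracting the
  // tag turns the tagged pointer back into a raw instruction address.
  uintptr_t entry =
      tagged_code_object + kCodeHeaderSize - kHeapObjectTag + osr_pc_offset;
  printf("osr entry = %#lx\n", static_cast<unsigned long>(entry));  // 0x2170
  return 0;
}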
1343 
1344 // static
1345 void Builtins::Generate_FunctionPrototypeApply(MacroAssembler* masm) {
1346  // ----------- S t a t e -------------
1347  // -- a0 : argc
1348  // -- sp[0] : argArray
1349  // -- sp[8] : thisArg
1350  // -- sp[16] : receiver
1351  // -----------------------------------
1352 
1353  Register argc = a0;
1354  Register arg_array = a2;
1355  Register receiver = a1;
1356  Register this_arg = a5;
1357  Register undefined_value = a3;
1358  Register scratch = a4;
1359 
1360  __ LoadRoot(undefined_value, RootIndex::kUndefinedValue);
1361 
1362  // 1. Load receiver into a1, argArray into a2 (if present), remove all
1363  // arguments from the stack (including the receiver), and push thisArg (if
1364  // present) instead.
1365  {
1366  // Claim (2 - argc) dummy arguments form the stack, to put the stack in a
1367  // consistent state for a simple pop operation.
1368 
1369  __ Dsubu(sp, sp, Operand(2 * kPointerSize));
1370  __ Dlsa(sp, sp, argc, kPointerSizeLog2);
1371  __ mov(scratch, argc);
1372  __ Pop(this_arg, arg_array); // Overwrite argc
1373  __ Movz(arg_array, undefined_value, scratch); // if argc == 0
1374  __ Movz(this_arg, undefined_value, scratch); // if argc == 0
1375  __ Dsubu(scratch, scratch, Operand(1));
1376  __ Movz(arg_array, undefined_value, scratch); // if argc == 1
1377  __ Ld(receiver, MemOperand(sp));
1378  __ Sd(this_arg, MemOperand(sp));
1379  }
1380 
1381  // ----------- S t a t e -------------
1382  // -- a2 : argArray
1383  // -- a1 : receiver
1384  // -- a3 : undefined root value
1385  // -- sp[0] : thisArg
1386  // -----------------------------------
1387 
1388  // 2. We don't need to check explicitly for callable receiver here,
1389  // since that's the first thing the Call/CallWithArrayLike builtins
1390  // will do.
1391 
1392  // 3. Tail call with no arguments if argArray is null or undefined.
1393  Label no_arguments;
1394  __ JumpIfRoot(arg_array, RootIndex::kNullValue, &no_arguments);
1395  __ Branch(&no_arguments, eq, arg_array, Operand(undefined_value));
1396 
1397  // 4a. Apply the receiver to the given argArray.
1398  __ Jump(BUILTIN_CODE(masm->isolate(), CallWithArrayLike),
1399  RelocInfo::CODE_TARGET);
1400 
1401  // 4b. The argArray is either null or undefined, so we tail call without any
1402  // arguments to the receiver.
1403  __ bind(&no_arguments);
1404  {
1405  __ mov(a0, zero_reg);
1406  DCHECK(receiver == a1);
1407  __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
1408  }
1409 }
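The register shuffling in step 1 is argument defaulting: with fewer than two arguments, the missing thisArg and argArray become undefined, which the builtin expresses with Movz (move if the counter is zero) on a decrementing scratch register. The same defaulting written out directly, as a sketch of the intended Function.prototype.apply semantics rather than the stack manipulation:

#include <cstdio>
#include <string>

using Value = std::string;  // stand-in for a tagged value
static const Value kUndefined = "undefined";

struct ApplyArgs { Value this_arg; Value arg_array; };

// argc counts the arguments passed to fn.apply(...), excluding the receiver.
ApplyArgs DefaultApplyArguments(int argc, const Value& arg0, const Value& arg1) {
  ApplyArgs out{kUndefined, kUndefined};  // the two Movz(..., undefined_value, scratch)
  if (argc >= 1) out.this_arg = arg0;
  if (argc >= 2) out.arg_array = arg1;    // second Movz after scratch is decremented
  return out;
}

int main() {
  ApplyArgs a = DefaultApplyArguments(1, "receiverObj", "");
  printf("thisArg=%s argArray=%s\n", a.this_arg.c_str(), a.arg_array.c_str());
  return 0;
}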
1410 
1411 // static
1412 void Builtins::Generate_FunctionPrototypeCall(MacroAssembler* masm) {
1413  // 1. Make sure we have at least one argument.
1414  // a0: actual number of arguments
1415  {
1416  Label done;
1417  __ Branch(&done, ne, a0, Operand(zero_reg));
1418  __ PushRoot(RootIndex::kUndefinedValue);
1419  __ Daddu(a0, a0, Operand(1));
1420  __ bind(&done);
1421  }
1422 
1423  // 2. Get the function to call (passed as receiver) from the stack.
1424  // a0: actual number of arguments
1425  __ Dlsa(kScratchReg, sp, a0, kPointerSizeLog2);
1426  __ Ld(a1, MemOperand(kScratchReg));
1427 
1428  // 3. Shift arguments and return address one slot down on the stack
1429  // (overwriting the original receiver). Adjust argument count to make
1430  // the original first argument the new receiver.
1431  // a0: actual number of arguments
1432  // a1: function
1433  {
1434  Label loop;
1435  // Calculate the copy start address (destination). Copy end address is sp.
1436  __ Dlsa(a2, sp, a0, kPointerSizeLog2);
1437 
1438  __ bind(&loop);
1439  __ Ld(kScratchReg, MemOperand(a2, -kPointerSize));
1440  __ Sd(kScratchReg, MemOperand(a2));
1441  __ Dsubu(a2, a2, Operand(kPointerSize));
1442  __ Branch(&loop, ne, a2, Operand(sp));
1443  // Adjust the actual number of arguments and remove the top element
1444  // (which is a copy of the last argument).
1445  __ Dsubu(a0, a0, Operand(1));
1446  __ Pop();
1447  }
1448 
1449  // 4. Call the callable.
1450  __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
1451 }
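// ---------------------------------------------------------------------------
// Illustrative sketch (not part of the original builtins-mips64.cc): the copy
// loop in Generate_FunctionPrototypeCall slides every stack slot one position
// toward the receiver slot, so the original first argument becomes the new
// receiver and the now-duplicated bottom slot is popped. Modeled here on a
// plain array in which element i stands for the value at sp + i * kPointerSize.
// ---------------------------------------------------------------------------
#if 0  // Example only, kept out of the build.
// stack[argc] is the receiver, stack[argc - 1] the first argument and
// stack[0] the last argument; step 1 above guarantees argc >= 1.
// Returns the new argument count.
int ShiftOutReceiver(void** stack, int argc) {
  for (int i = argc; i >= 1; --i) {
    stack[i] = stack[i - 1];  // overwrite each slot with the value below it
  }
  // stack[0] now duplicates stack[1]; the builtin pops it and decrements a0,
  // leaving argc - 1 arguments plus the new receiver.
  return argc - 1;
}
#endif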
1452 
1453 void Builtins::Generate_ReflectApply(MacroAssembler* masm) {
1454  // ----------- S t a t e -------------
1455  // -- a0 : argc
1456  // -- sp[0]  : argumentsList (if argc == 3)
1457  // -- sp[8]  : thisArgument  (if argc >= 2)
1458  // -- sp[16] : target        (if argc >= 1)
1459  // -- sp[24] : receiver
1460  // -----------------------------------
1461 
1462  Register argc = a0;
1463  Register arguments_list = a2;
1464  Register target = a1;
1465  Register this_argument = a5;
1466  Register undefined_value = a3;
1467  Register scratch = a4;
1468 
1469  __ LoadRoot(undefined_value, RootIndex::kUndefinedValue);
1470 
1471  // 1. Load target into a1 (if present), argumentsList into a2 (if present),
1472  // remove all arguments from the stack (including the receiver), and push
1473  // thisArgument (if present) instead.
1474  {
1475  // Claim (3 - argc) dummy arguments from the stack, to put the stack in a
1476  // consistent state for a simple pop operation.
1477 
1478  __ Dsubu(sp, sp, Operand(3 * kPointerSize));
1479  __ Dlsa(sp, sp, argc, kPointerSizeLog2);
1480  __ mov(scratch, argc);
1481  __ Pop(target, this_argument, arguments_list);
1482  __ Movz(arguments_list, undefined_value, scratch); // if argc == 0
1483  __ Movz(this_argument, undefined_value, scratch); // if argc == 0
1484  __ Movz(target, undefined_value, scratch); // if argc == 0
1485  __ Dsubu(scratch, scratch, Operand(1));
1486  __ Movz(arguments_list, undefined_value, scratch); // if argc == 1
1487  __ Movz(this_argument, undefined_value, scratch); // if argc == 1
1488  __ Dsubu(scratch, scratch, Operand(1));
1489  __ Movz(arguments_list, undefined_value, scratch); // if argc == 2
1490 
1491  __ Sd(this_argument, MemOperand(sp, 0)); // Overwrite receiver
1492  }
1493 
1494  // ----------- S t a t e -------------
1495  // -- a2 : argumentsList
1496  // -- a1 : target
1497  // -- a3 : undefined root value
1498  // -- sp[0] : thisArgument
1499  // -----------------------------------
1500 
1501  // 2. We don't need to check explicitly for callable target here,
1502  // since that's the first thing the Call/CallWithArrayLike builtins
1503  // will do.
1504 
1505  // 3. Apply the target to the given argumentsList.
1506  __ Jump(BUILTIN_CODE(masm->isolate(), CallWithArrayLike),
1507  RelocInfo::CODE_TARGET);
1508 }
1509 
1510 void Builtins::Generate_ReflectConstruct(MacroAssembler* masm) {
1511  // ----------- S t a t e -------------
1512  // -- a0 : argc
1513  // -- sp[0]  : new.target (optional) (dummy value if argc <= 2)
1514  // -- sp[8]  : argumentsList (dummy value if argc <= 1)
1515  // -- sp[16] : target (dummy value if argc == 0)
1516  // -- sp[24] : receiver
1517  // -----------------------------------
1518  Register argc = a0;
1519  Register arguments_list = a2;
1520  Register target = a1;
1521  Register new_target = a3;
1522  Register undefined_value = a4;
1523  Register scratch = a5;
1524 
1525  __ LoadRoot(undefined_value, RootIndex::kUndefinedValue);
1526 
1527  // 1. Load target into a1 (if present), argumentsList into a2 (if present),
1528  // new.target into a3 (if present, otherwise use target), remove all
1529  // arguments from the stack (including the receiver), and push undefined as
1530  // the receiver instead.
1531  {
1532  // Claim (3 - argc) dummy arguments from the stack, to put the stack in a
1533  // consistent state for a simple pop operation.
1534 
1535  __ Dsubu(sp, sp, Operand(3 * kPointerSize));
1536  __ Dlsa(sp, sp, argc, kPointerSizeLog2);
1537  __ mov(scratch, argc);
1538  __ Pop(target, arguments_list, new_target);
1539  __ Movz(arguments_list, undefined_value, scratch); // if argc == 0
1540  __ Movz(new_target, undefined_value, scratch); // if argc == 0
1541  __ Movz(target, undefined_value, scratch); // if argc == 0
1542  __ Dsubu(scratch, scratch, Operand(1));
1543  __ Movz(arguments_list, undefined_value, scratch); // if argc == 1
1544  __ Movz(new_target, target, scratch); // if argc == 1
1545  __ Dsubu(scratch, scratch, Operand(1));
1546  __ Movz(new_target, target, scratch); // if argc == 2
1547 
1548  __ Sd(undefined_value, MemOperand(sp, 0)); // Overwrite receiver
1549  }
1550 
1551  // ----------- S t a t e -------------
1552  // -- a2 : argumentsList
1553  // -- a1 : target
1554  // -- a3 : new.target
1555  // -- sp[0] : receiver (undefined)
1556  // -----------------------------------
1557 
1558  // 2. We don't need to check explicitly for constructor target here,
1559  // since that's the first thing the Construct/ConstructWithArrayLike
1560  // builtins will do.
1561 
1562  // 3. We don't need to check explicitly for constructor new.target here,
1563  // since that's the second thing the Construct/ConstructWithArrayLike
1564  // builtins will do.
1565 
1566  // 4. Construct the target with the given new.target and argumentsList.
1567  __ Jump(BUILTIN_CODE(masm->isolate(), ConstructWithArrayLike),
1568  RelocInfo::CODE_TARGET);
1569 }
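// ---------------------------------------------------------------------------
// Illustrative sketch (not part of the original builtins-mips64.cc): in
// Generate_ReflectConstruct the same Movz pattern is used, but new.target
// falls back to target (not undefined) whenever argc <= 2, and the receiver
// slot is overwritten with undefined. Value and kUndefined are placeholder
// names.
// ---------------------------------------------------------------------------
#if 0  // Example only, kept out of the build.
struct Value { const char* tag; };
static const Value kUndefined = {"undefined"};

void SelectReflectConstructArguments(const Value* args, int argc,
                                     Value* target, Value* arguments_list,
                                     Value* new_target) {
  *target = (argc >= 1) ? args[0] : kUndefined;
  *arguments_list = (argc >= 2) ? args[1] : kUndefined;
  *new_target = (argc >= 3) ? args[2] : *target;  // Movz(new_target, target)
}
#endif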
1570 
1571 static void EnterArgumentsAdaptorFrame(MacroAssembler* masm) {
1572  __ SmiTag(a0);
1573  __ li(a4, Operand(StackFrame::TypeToMarker(StackFrame::ARGUMENTS_ADAPTOR)));
1574  __ MultiPush(a0.bit() | a1.bit() | a4.bit() | fp.bit() | ra.bit());
1575  __ Push(Smi::zero()); // Padding.
1576  __ Daddu(fp, sp,
1577  Operand(ArgumentsAdaptorFrameConstants::kFixedFrameSizeFromFp));
1578 }
1579 
1580 static void LeaveArgumentsAdaptorFrame(MacroAssembler* masm) {
1581  // ----------- S t a t e -------------
1582  // -- v0 : result being passed through
1583  // -----------------------------------
1584  // Get the number of arguments passed (as a smi), tear down the frame and
1585  // then tear down the parameters.
1586  __ Ld(a1, MemOperand(fp, ArgumentsAdaptorFrameConstants::kLengthOffset));
1587  __ mov(sp, fp);
1588  __ MultiPop(fp.bit() | ra.bit());
1589  __ SmiScale(a4, a1, kPointerSizeLog2);
1590  __ Daddu(sp, sp, a4);
1591  // Adjust for the receiver.
1592  __ Daddu(sp, sp, Operand(kPointerSize));
1593 }
1594 
1595 // static
1596 void Builtins::Generate_CallOrConstructVarargs(MacroAssembler* masm,
1597  Handle<Code> code) {
1598  // ----------- S t a t e -------------
1599  // -- a1 : target
1600  // -- a0 : number of parameters on the stack (not including the receiver)
1601  // -- a2 : arguments list (a FixedArray)
1602  // -- a4 : len (number of elements to push from args)
1603  // -- a3 : new.target (for [[Construct]])
1604  // -----------------------------------
1605  if (masm->emit_debug_code()) {
1606  // Allow a2 to be a FixedArray, or a FixedDoubleArray if a4 == 0.
1607  Label ok, fail;
1608  __ AssertNotSmi(a2);
1609  __ GetObjectType(a2, t8, t8);
1610  __ Branch(&ok, eq, t8, Operand(FIXED_ARRAY_TYPE));
1611  __ Branch(&fail, ne, t8, Operand(FIXED_DOUBLE_ARRAY_TYPE));
1612  __ Branch(&ok, eq, a4, Operand(zero_reg));
1613  // Fall through.
1614  __ bind(&fail);
1615  __ Abort(AbortReason::kOperandIsNotAFixedArray);
1616 
1617  __ bind(&ok);
1618  }
1619 
1620  Register args = a2;
1621  Register len = a4;
1622 
1623  // Check for stack overflow.
1624  Label stack_overflow;
1625  Generate_StackOverflowCheck(masm, len, kScratchReg, a5, &stack_overflow);
1626 
1627  // Push arguments onto the stack (thisArgument is already on the stack).
1628  {
1629  Label done, push, loop;
1630  Register src = a6;
1631  Register scratch = len;
1632 
1633  __ daddiu(src, args, FixedArray::kHeaderSize - kHeapObjectTag);
1634  __ Branch(&done, eq, len, Operand(zero_reg), i::USE_DELAY_SLOT);
1635  __ Daddu(a0, a0, len); // The 'len' argument for Call() or Construct().
1636  __ dsll(scratch, len, kPointerSizeLog2);
1637  __ Dsubu(scratch, sp, Operand(scratch));
1638  __ LoadRoot(t1, RootIndex::kTheHoleValue);
1639  __ bind(&loop);
1640  __ Ld(a5, MemOperand(src));
1641  __ Branch(&push, ne, a5, Operand(t1));
1642  __ LoadRoot(a5, RootIndex::kUndefinedValue);
1643  __ bind(&push);
1644  __ daddiu(src, src, kPointerSize);
1645  __ Push(a5);
1646  __ Branch(&loop, ne, scratch, Operand(sp));
1647  __ bind(&done);
1648  }
1649 
1650  // Tail-call to the actual Call or Construct builtin.
1651  __ Jump(code, RelocInfo::CODE_TARGET);
1652 
1653  __ bind(&stack_overflow);
1654  __ TailCallRuntime(Runtime::kThrowStackOverflow);
1655 }
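// ---------------------------------------------------------------------------
// Illustrative sketch (not part of the original builtins-mips64.cc): the push
// loop in Generate_CallOrConstructVarargs copies 'len' elements out of the
// arguments FixedArray and replaces any "the hole" sentinel with undefined
// before pushing it, so sparse inputs never leak the hole onto the stack.
// Modeled below with a plain enum instead of tagged values.
// ---------------------------------------------------------------------------
#if 0  // Example only, kept out of the build.
enum Tagged { kTheHole, kUndefined, kSomeObject };

// Appends 'len' elements to the outgoing argument area and returns the new
// argument count (the builtin does the same by bumping a0 and pushing).
int PushVarargs(const Tagged* elements, int len, Tagged* out, int argc) {
  for (int i = 0; i < len; ++i) {
    Tagged value = elements[i];
    if (value == kTheHole) value = kUndefined;  // hole check before the Push
    out[argc++] = value;
  }
  return argc;
}
#endif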
1656 
1657 // static
1658 void Builtins::Generate_CallOrConstructForwardVarargs(MacroAssembler* masm,
1659  CallOrConstructMode mode,
1660  Handle<Code> code) {
1661  // ----------- S t a t e -------------
1662  // -- a0 : the number of arguments (not including the receiver)
1663  // -- a3 : the new.target (for [[Construct]] calls)
1664  // -- a1 : the target to call (can be any Object)
1665  // -- a2 : start index (to support rest parameters)
1666  // -----------------------------------
1667 
1668  // Check if new.target has a [[Construct]] internal method.
1669  if (mode == CallOrConstructMode::kConstruct) {
1670  Label new_target_constructor, new_target_not_constructor;
1671  __ JumpIfSmi(a3, &new_target_not_constructor);
1672  __ ld(t1, FieldMemOperand(a3, HeapObject::kMapOffset));
1673  __ lbu(t1, FieldMemOperand(t1, Map::kBitFieldOffset));
1674  __ And(t1, t1, Operand(Map::IsConstructorBit::kMask));
1675  __ Branch(&new_target_constructor, ne, t1, Operand(zero_reg));
1676  __ bind(&new_target_not_constructor);
1677  {
1678  FrameScope scope(masm, StackFrame::MANUAL);
1679  __ EnterFrame(StackFrame::INTERNAL);
1680  __ Push(a3);
1681  __ CallRuntime(Runtime::kThrowNotConstructor);
1682  }
1683  __ bind(&new_target_constructor);
1684  }
1685 
1686  // Check if we have an arguments adaptor frame below the function frame.
1687  Label arguments_adaptor, arguments_done;
1688  __ Ld(a6, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
1689  __ Ld(a7, MemOperand(a6, CommonFrameConstants::kContextOrFrameTypeOffset));
1690  __ Branch(&arguments_adaptor, eq, a7,
1691  Operand(StackFrame::TypeToMarker(StackFrame::ARGUMENTS_ADAPTOR)));
1692  {
1693  __ Ld(a7, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
1694  __ Ld(a7, FieldMemOperand(a7, JSFunction::kSharedFunctionInfoOffset));
1695  __ Lhu(a7, FieldMemOperand(
1696  a7, SharedFunctionInfo::kFormalParameterCountOffset));
1697  __ mov(a6, fp);
1698  }
1699  __ Branch(&arguments_done);
1700  __ bind(&arguments_adaptor);
1701  {
1702  // Just get the length from the ArgumentsAdaptorFrame.
1703  __ SmiUntag(a7,
1704  MemOperand(a6, ArgumentsAdaptorFrameConstants::kLengthOffset));
1705  }
1706  __ bind(&arguments_done);
1707 
1708  Label stack_done, stack_overflow;
1709  __ Subu(a7, a7, a2);
1710  __ Branch(&stack_done, le, a7, Operand(zero_reg));
1711  {
1712  // Check for stack overflow.
1713  Generate_StackOverflowCheck(masm, a7, a4, a5, &stack_overflow);
1714 
1715  // Forward the arguments from the caller frame.
1716  {
1717  Label loop;
1718  __ Daddu(a0, a0, a7);
1719  __ bind(&loop);
1720  {
1721  __ Dlsa(kScratchReg, a6, a7, kPointerSizeLog2);
1722  __ Ld(kScratchReg, MemOperand(kScratchReg, 1 * kPointerSize));
1723  __ push(kScratchReg);
1724  __ Subu(a7, a7, Operand(1));
1725  __ Branch(&loop, ne, a7, Operand(zero_reg));
1726  }
1727  }
1728  }
1729  __ Branch(&stack_done);
1730  __ bind(&stack_overflow);
1731  __ TailCallRuntime(Runtime::kThrowStackOverflow);
1732  __ bind(&stack_done);
1733 
1734  // Tail-call to the {code} handler.
1735  __ Jump(code, RelocInfo::CODE_TARGET);
1736 }
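// ---------------------------------------------------------------------------
// Illustrative sketch (not part of the original builtins-mips64.cc): after
// determining how many arguments the caller received (either from the
// SharedFunctionInfo's formal parameter count or from the adaptor frame
// length), Generate_CallOrConstructForwardVarargs pushes the tail of those
// arguments, starting at the start index in a2, on top of the arguments that
// are already set up. Frame walking and push order are elided here.
// ---------------------------------------------------------------------------
#if 0  // Example only, kept out of the build.
int ForwardVarargs(const void* const* caller_args, int caller_argc,
                   int start_index, const void** outgoing, int outgoing_argc) {
  for (int i = start_index; i < caller_argc; ++i) {
    outgoing[outgoing_argc++] = caller_args[i];  // forward the caller's argument
  }
  return outgoing_argc;  // becomes the new value of a0
}
#endif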
1737 
1738 // static
1739 void Builtins::Generate_CallFunction(MacroAssembler* masm,
1740  ConvertReceiverMode mode) {
1741  // ----------- S t a t e -------------
1742  // -- a0 : the number of arguments (not including the receiver)
1743  // -- a1 : the function to call (checked to be a JSFunction)
1744  // -----------------------------------
1745  __ AssertFunction(a1);
1746 
1747  // See ES6 section 9.2.1 [[Call]] ( thisArgument, argumentsList)
1748  // Check that function is not a "classConstructor".
1749  Label class_constructor;
1750  __ Ld(a2, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset));
1751  __ Lwu(a3, FieldMemOperand(a2, SharedFunctionInfo::kFlagsOffset));
1752  __ And(kScratchReg, a3,
1753  Operand(SharedFunctionInfo::IsClassConstructorBit::kMask));
1754  __ Branch(&class_constructor, ne, kScratchReg, Operand(zero_reg));
1755 
1756  // Enter the context of the function; ToObject has to run in the function
1757  // context, and we also need to take the global proxy from the function
1758  // context in case of conversion.
1759  __ Ld(cp, FieldMemOperand(a1, JSFunction::kContextOffset));
1760  // We need to convert the receiver for non-native sloppy mode functions.
1761  Label done_convert;
1762  __ Lwu(a3, FieldMemOperand(a2, SharedFunctionInfo::kFlagsOffset));
1763  __ And(kScratchReg, a3,
1764  Operand(SharedFunctionInfo::IsNativeBit::kMask |
1765  SharedFunctionInfo::IsStrictBit::kMask));
1766  __ Branch(&done_convert, ne, kScratchReg, Operand(zero_reg));
1767  {
1768  // ----------- S t a t e -------------
1769  // -- a0 : the number of arguments (not including the receiver)
1770  // -- a1 : the function to call (checked to be a JSFunction)
1771  // -- a2 : the shared function info.
1772  // -- cp : the function context.
1773  // -----------------------------------
1774 
1775  if (mode == ConvertReceiverMode::kNullOrUndefined) {
1776  // Patch receiver to global proxy.
1777  __ LoadGlobalProxy(a3);
1778  } else {
1779  Label convert_to_object, convert_receiver;
1780  __ Dlsa(kScratchReg, sp, a0, kPointerSizeLog2);
1781  __ Ld(a3, MemOperand(kScratchReg));
1782  __ JumpIfSmi(a3, &convert_to_object);
1783  STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
1784  __ GetObjectType(a3, a4, a4);
1785  __ Branch(&done_convert, hs, a4, Operand(FIRST_JS_RECEIVER_TYPE));
1786  if (mode != ConvertReceiverMode::kNotNullOrUndefined) {
1787  Label convert_global_proxy;
1788  __ JumpIfRoot(a3, RootIndex::kUndefinedValue, &convert_global_proxy);
1789  __ JumpIfNotRoot(a3, RootIndex::kNullValue, &convert_to_object);
1790  __ bind(&convert_global_proxy);
1791  {
1792  // Patch receiver to global proxy.
1793  __ LoadGlobalProxy(a3);
1794  }
1795  __ Branch(&convert_receiver);
1796  }
1797  __ bind(&convert_to_object);
1798  {
1799  // Convert receiver using ToObject.
1800  // TODO(bmeurer): Inline the allocation here to avoid building the frame
1801  // in the fast case? (fall back to AllocateInNewSpace?)
1802  FrameScope scope(masm, StackFrame::INTERNAL);
1803  __ SmiTag(a0);
1804  __ Push(a0, a1);
1805  __ mov(a0, a3);
1806  __ Push(cp);
1807  __ Call(BUILTIN_CODE(masm->isolate(), ToObject),
1808  RelocInfo::CODE_TARGET);
1809  __ Pop(cp);
1810  __ mov(a3, v0);
1811  __ Pop(a0, a1);
1812  __ SmiUntag(a0);
1813  }
1814  __ Ld(a2, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset));
1815  __ bind(&convert_receiver);
1816  }
1817  __ Dlsa(kScratchReg, sp, a0, kPointerSizeLog2);
1818  __ Sd(a3, MemOperand(kScratchReg));
1819  }
1820  __ bind(&done_convert);
1821 
1822  // ----------- S t a t e -------------
1823  // -- a0 : the number of arguments (not including the receiver)
1824  // -- a1 : the function to call (checked to be a JSFunction)
1825  // -- a2 : the shared function info.
1826  // -- cp : the function context.
1827  // -----------------------------------
1828 
1829  __ Lhu(a2,
1830  FieldMemOperand(a2, SharedFunctionInfo::kFormalParameterCountOffset));
1831  ParameterCount actual(a0);
1832  ParameterCount expected(a2);
1833  __ InvokeFunctionCode(a1, no_reg, expected, actual, JUMP_FUNCTION);
1834 
1835  // The function is a "classConstructor", need to raise an exception.
1836  __ bind(&class_constructor);
1837  {
1838  FrameScope frame(masm, StackFrame::INTERNAL);
1839  __ Push(a1);
1840  __ CallRuntime(Runtime::kThrowConstructorNonCallableError);
1841  }
1842 }
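// ---------------------------------------------------------------------------
// Illustrative sketch (not part of the original builtins-mips64.cc): the
// receiver conversion above only runs for sloppy-mode, non-native functions
// (the IsNativeBit | IsStrictBit test). Its decision table is summarized
// below; the global_proxy/to_object parameters stand for the corresponding
// operations and are not real declarations from this file.
// ---------------------------------------------------------------------------
#if 0  // Example only, kept out of the build.
enum class ReceiverKind { kUndefinedOrNull, kOtherPrimitive, kJSReceiver };

template <typename Value>
Value ConvertReceiver(ReceiverKind kind, Value receiver, Value global_proxy,
                      Value (*to_object)(Value)) {
  switch (kind) {
    case ReceiverKind::kUndefinedOrNull: return global_proxy;        // patch to global proxy
    case ReceiverKind::kOtherPrimitive:  return to_object(receiver); // ToObject builtin
    case ReceiverKind::kJSReceiver:      return receiver;            // already an object
  }
  return receiver;
}
#endif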
1843 
1844 // static
1845 void Builtins::Generate_CallBoundFunctionImpl(MacroAssembler* masm) {
1846  // ----------- S t a t e -------------
1847  // -- a0 : the number of arguments (not including the receiver)
1848  // -- a1 : the function to call (checked to be a JSBoundFunction)
1849  // -----------------------------------
1850  __ AssertBoundFunction(a1);
1851 
1852  // Patch the receiver to [[BoundThis]].
1853  {
1854  __ Ld(kScratchReg, FieldMemOperand(a1, JSBoundFunction::kBoundThisOffset));
1855  __ Dlsa(a4, sp, a0, kPointerSizeLog2);
1856  __ Sd(kScratchReg, MemOperand(a4));
1857  }
1858 
1859  // Load [[BoundArguments]] into a2 and length of that into a4.
1860  __ Ld(a2, FieldMemOperand(a1, JSBoundFunction::kBoundArgumentsOffset));
1861  __ SmiUntag(a4, FieldMemOperand(a2, FixedArray::kLengthOffset));
1862 
1863  // ----------- S t a t e -------------
1864  // -- a0 : the number of arguments (not including the receiver)
1865  // -- a1 : the function to call (checked to be a JSBoundFunction)
1866  // -- a2 : the [[BoundArguments]] (implemented as FixedArray)
1867  // -- a4 : the number of [[BoundArguments]]
1868  // -----------------------------------
1869 
1870  // Reserve stack space for the [[BoundArguments]].
1871  {
1872  Label done;
1873  __ dsll(a5, a4, kPointerSizeLog2);
1874  __ Dsubu(sp, sp, Operand(a5));
1875  // Check the stack for overflow. We are not trying to catch interruptions
1876  // (i.e. debug break and preemption) here, so check the "real stack limit".
1877  __ LoadRoot(kScratchReg, RootIndex::kRealStackLimit);
1878  __ Branch(&done, hs, sp, Operand(kScratchReg));
1879  // Restore the stack pointer.
1880  __ Daddu(sp, sp, Operand(a5));
1881  {
1882  FrameScope scope(masm, StackFrame::MANUAL);
1883  __ EnterFrame(StackFrame::INTERNAL);
1884  __ CallRuntime(Runtime::kThrowStackOverflow);
1885  }
1886  __ bind(&done);
1887  }
1888 
1889  // Relocate arguments down the stack.
1890  {
1891  Label loop, done_loop;
1892  __ mov(a5, zero_reg);
1893  __ bind(&loop);
1894  __ Branch(&done_loop, gt, a5, Operand(a0));
1895  __ Dlsa(a6, sp, a4, kPointerSizeLog2);
1896  __ Ld(kScratchReg, MemOperand(a6));
1897  __ Dlsa(a6, sp, a5, kPointerSizeLog2);
1898  __ Sd(kScratchReg, MemOperand(a6));
1899  __ Daddu(a4, a4, Operand(1));
1900  __ Daddu(a5, a5, Operand(1));
1901  __ Branch(&loop);
1902  __ bind(&done_loop);
1903  }
1904 
1905  // Copy [[BoundArguments]] to the stack (below the arguments).
1906  {
1907  Label loop, done_loop;
1908  __ SmiUntag(a4, FieldMemOperand(a2, FixedArray::kLengthOffset));
1909  __ Daddu(a2, a2, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
1910  __ bind(&loop);
1911  __ Dsubu(a4, a4, Operand(1));
1912  __ Branch(&done_loop, lt, a4, Operand(zero_reg));
1913  __ Dlsa(a5, a2, a4, kPointerSizeLog2);
1914  __ Ld(kScratchReg, MemOperand(a5));
1915  __ Dlsa(a5, sp, a0, kPointerSizeLog2);
1916  __ Sd(kScratchReg, MemOperand(a5));
1917  __ Daddu(a0, a0, Operand(1));
1918  __ Branch(&loop);
1919  __ bind(&done_loop);
1920  }
1921 
1922  // Call the [[BoundTargetFunction]] via the Call builtin.
1923  __ Ld(a1, FieldMemOperand(a1, JSBoundFunction::kBoundTargetFunctionOffset));
1924  __ Jump(BUILTIN_CODE(masm->isolate(), Call_ReceiverIsAny),
1925  RelocInfo::CODE_TARGET);
1926 }
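// ---------------------------------------------------------------------------
// Illustrative sketch (not part of the original builtins-mips64.cc): calling
// a bound function replaces the receiver with [[BoundThis]] and inserts the
// [[BoundArguments]] in front of the caller-supplied arguments before tail
// calling the [[BoundTargetFunction]]. The builtin grows the stack and
// relocates the existing arguments; the array version below has the same
// effect.
// ---------------------------------------------------------------------------
#if 0  // Example only, kept out of the build.
// args[0..argc-1] are the caller-supplied arguments; the array must have room
// for argc + bound_count entries. Returns the new argument count.
int PrependBoundArguments(const void* const* bound_args, int bound_count,
                          const void** args, int argc) {
  for (int i = argc - 1; i >= 0; --i) args[i + bound_count] = args[i];  // make room
  for (int i = 0; i < bound_count; ++i) args[i] = bound_args[i];        // bound args first
  return argc + bound_count;  // becomes the new value of a0
}
#endif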
1927 
1928 // static
1929 void Builtins::Generate_Call(MacroAssembler* masm, ConvertReceiverMode mode) {
1930  // ----------- S t a t e -------------
1931  // -- a0 : the number of arguments (not including the receiver)
1932  // -- a1 : the target to call (can be any Object).
1933  // -----------------------------------
1934 
1935  Label non_callable, non_function, non_smi;
1936  __ JumpIfSmi(a1, &non_callable);
1937  __ bind(&non_smi);
1938  __ GetObjectType(a1, t1, t2);
1939  __ Jump(masm->isolate()->builtins()->CallFunction(mode),
1940  RelocInfo::CODE_TARGET, eq, t2, Operand(JS_FUNCTION_TYPE));
1941  __ Jump(BUILTIN_CODE(masm->isolate(), CallBoundFunction),
1942  RelocInfo::CODE_TARGET, eq, t2, Operand(JS_BOUND_FUNCTION_TYPE));
1943 
1944  // Check if target has a [[Call]] internal method.
1945  __ Lbu(t1, FieldMemOperand(t1, Map::kBitFieldOffset));
1946  __ And(t1, t1, Operand(Map::IsCallableBit::kMask));
1947  __ Branch(&non_callable, eq, t1, Operand(zero_reg));
1948 
1949  __ Branch(&non_function, ne, t2, Operand(JS_PROXY_TYPE));
1950  __ Jump(BUILTIN_CODE(masm->isolate(), CallProxy), RelocInfo::CODE_TARGET);
1951 
1952  // 2. Call to something else, which might have a [[Call]] internal method (if
1953  // not we raise an exception).
1954  __ bind(&non_function);
1955  // Overwrite the original receiver with the (original) target.
1956  __ Dlsa(kScratchReg, sp, a0, kPointerSizeLog2);
1957  __ Sd(a1, MemOperand(kScratchReg));
1958  // Let the "call_as_function_delegate" take care of the rest.
1959  __ LoadNativeContextSlot(Context::CALL_AS_FUNCTION_DELEGATE_INDEX, a1);
1960  __ Jump(masm->isolate()->builtins()->CallFunction(
1961  ConvertReceiverMode::kNotNullOrUndefined),
1962  RelocInfo::CODE_TARGET);
1963 
1964  // 3. Call to something that is not callable.
1965  __ bind(&non_callable);
1966  {
1967  FrameScope scope(masm, StackFrame::INTERNAL);
1968  __ Push(a1);
1969  __ CallRuntime(Runtime::kThrowCalledNonCallable);
1970  }
1971 }
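// ---------------------------------------------------------------------------
// Illustrative sketch (not part of the original builtins-mips64.cc): the
// dispatch order implemented by Generate_Call above, written out as a plain
// switch. Proxies and "other callables" are only reached after the
// IsCallableBit test; everything else throws. The enumerators are
// placeholders, not real V8 declarations.
// ---------------------------------------------------------------------------
#if 0  // Example only, kept out of the build.
enum class TargetKind {
  kSmi, kJSFunction, kJSBoundFunction, kCallableJSProxy, kOtherCallable,
  kNotCallable
};
enum class CallPath {
  kCallFunction, kCallBoundFunction, kCallProxy, kCallAsFunctionDelegate,
  kThrowCalledNonCallable
};

CallPath DispatchCall(TargetKind kind) {
  switch (kind) {
    case TargetKind::kJSFunction:      return CallPath::kCallFunction;
    case TargetKind::kJSBoundFunction: return CallPath::kCallBoundFunction;
    case TargetKind::kCallableJSProxy: return CallPath::kCallProxy;
    case TargetKind::kOtherCallable:   return CallPath::kCallAsFunctionDelegate;
    case TargetKind::kSmi:
    case TargetKind::kNotCallable:     return CallPath::kThrowCalledNonCallable;
  }
  return CallPath::kThrowCalledNonCallable;
}
#endif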
1972 
1973 void Builtins::Generate_ConstructFunction(MacroAssembler* masm) {
1974  // ----------- S t a t e -------------
1975  // -- a0 : the number of arguments (not including the receiver)
1976  // -- a1 : the constructor to call (checked to be a JSFunction)
1977  // -- a3 : the new target (checked to be a constructor)
1978  // -----------------------------------
1979  __ AssertConstructor(a1);
1980  __ AssertFunction(a1);
1981 
1982  // The calling convention for function-specific ConstructStubs requires
1983  // a2 to contain either an AllocationSite or undefined.
1984  __ LoadRoot(a2, RootIndex::kUndefinedValue);
1985 
1986  Label call_generic_stub;
1987 
1988  // Jump to JSBuiltinsConstructStub or JSConstructStubGeneric.
1989  __ Ld(a4, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset));
1990  __ lwu(a4, FieldMemOperand(a4, SharedFunctionInfo::kFlagsOffset));
1991  __ And(a4, a4, Operand(SharedFunctionInfo::ConstructAsBuiltinBit::kMask));
1992  __ Branch(&call_generic_stub, eq, a4, Operand(zero_reg));
1993 
1994  __ Jump(BUILTIN_CODE(masm->isolate(), JSBuiltinsConstructStub),
1995  RelocInfo::CODE_TARGET);
1996 
1997  __ bind(&call_generic_stub);
1998  __ Jump(BUILTIN_CODE(masm->isolate(), JSConstructStubGeneric),
1999  RelocInfo::CODE_TARGET);
2000 }
2001 
2002 // static
2003 void Builtins::Generate_ConstructBoundFunction(MacroAssembler* masm) {
2004  // ----------- S t a t e -------------
2005  // -- a0 : the number of arguments (not including the receiver)
2006  // -- a1 : the function to call (checked to be a JSBoundFunction)
2007  // -- a3 : the new target (checked to be a constructor)
2008  // -----------------------------------
2009  __ AssertConstructor(a1);
2010  __ AssertBoundFunction(a1);
2011 
2012  // Load [[BoundArguments]] into a2 and length of that into a4.
2013  __ Ld(a2, FieldMemOperand(a1, JSBoundFunction::kBoundArgumentsOffset));
2014  __ SmiUntag(a4, FieldMemOperand(a2, FixedArray::kLengthOffset));
2015 
2016  // ----------- S t a t e -------------
2017  // -- a0 : the number of arguments (not including the receiver)
2018  // -- a1 : the function to call (checked to be a JSBoundFunction)
2019  // -- a2 : the [[BoundArguments]] (implemented as FixedArray)
2020  // -- a3 : the new target (checked to be a constructor)
2021  // -- a4 : the number of [[BoundArguments]]
2022  // -----------------------------------
2023 
2024  // Reserve stack space for the [[BoundArguments]].
2025  {
2026  Label done;
2027  __ dsll(a5, a4, kPointerSizeLog2);
2028  __ Dsubu(sp, sp, Operand(a5));
2029  // Check the stack for overflow. We are not trying to catch interruptions
2030  // (i.e. debug break and preemption) here, so check the "real stack limit".
2031  __ LoadRoot(kScratchReg, RootIndex::kRealStackLimit);
2032  __ Branch(&done, hs, sp, Operand(kScratchReg));
2033  // Restore the stack pointer.
2034  __ Daddu(sp, sp, Operand(a5));
2035  {
2036  FrameScope scope(masm, StackFrame::MANUAL);
2037  __ EnterFrame(StackFrame::INTERNAL);
2038  __ CallRuntime(Runtime::kThrowStackOverflow);
2039  }
2040  __ bind(&done);
2041  }
2042 
2043  // Relocate arguments down the stack.
2044  {
2045  Label loop, done_loop;
2046  __ mov(a5, zero_reg);
2047  __ bind(&loop);
2048  __ Branch(&done_loop, ge, a5, Operand(a0));
2049  __ Dlsa(a6, sp, a4, kPointerSizeLog2);
2050  __ Ld(kScratchReg, MemOperand(a6));
2051  __ Dlsa(a6, sp, a5, kPointerSizeLog2);
2052  __ Sd(kScratchReg, MemOperand(a6));
2053  __ Daddu(a4, a4, Operand(1));
2054  __ Daddu(a5, a5, Operand(1));
2055  __ Branch(&loop);
2056  __ bind(&done_loop);
2057  }
2058 
2059  // Copy [[BoundArguments]] to the stack (below the arguments).
2060  {
2061  Label loop, done_loop;
2062  __ SmiUntag(a4, FieldMemOperand(a2, FixedArray::kLengthOffset));
2063  __ Daddu(a2, a2, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
2064  __ bind(&loop);
2065  __ Dsubu(a4, a4, Operand(1));
2066  __ Branch(&done_loop, lt, a4, Operand(zero_reg));
2067  __ Dlsa(a5, a2, a4, kPointerSizeLog2);
2068  __ Ld(kScratchReg, MemOperand(a5));
2069  __ Dlsa(a5, sp, a0, kPointerSizeLog2);
2070  __ Sd(kScratchReg, MemOperand(a5));
2071  __ Daddu(a0, a0, Operand(1));
2072  __ Branch(&loop);
2073  __ bind(&done_loop);
2074  }
2075 
2076  // Patch new.target to [[BoundTargetFunction]] if new.target equals target.
2077  {
2078  Label skip_load;
2079  __ Branch(&skip_load, ne, a1, Operand(a3));
2080  __ Ld(a3, FieldMemOperand(a1, JSBoundFunction::kBoundTargetFunctionOffset));
2081  __ bind(&skip_load);
2082  }
2083 
2084  // Construct the [[BoundTargetFunction]] via the Construct builtin.
2085  __ Ld(a1, FieldMemOperand(a1, JSBoundFunction::kBoundTargetFunctionOffset));
2086  __ Jump(BUILTIN_CODE(masm->isolate(), Construct), RelocInfo::CODE_TARGET);
2087 }
2088 
2089 // static
2090 void Builtins::Generate_Construct(MacroAssembler* masm) {
2091  // ----------- S t a t e -------------
2092  // -- a0 : the number of arguments (not including the receiver)
2093  // -- a1 : the constructor to call (can be any Object)
2094  // -- a3 : the new target (either the same as the constructor or
2095  // the JSFunction on which new was invoked initially)
2096  // -----------------------------------
2097 
2098  // Check if target is a Smi.
2099  Label non_constructor, non_proxy;
2100  __ JumpIfSmi(a1, &non_constructor);
2101 
2102  // Check if target has a [[Construct]] internal method.
2103  __ ld(t1, FieldMemOperand(a1, HeapObject::kMapOffset));
2104  __ Lbu(t3, FieldMemOperand(t1, Map::kBitFieldOffset));
2105  __ And(t3, t3, Operand(Map::IsConstructorBit::kMask));
2106  __ Branch(&non_constructor, eq, t3, Operand(zero_reg));
2107 
2108  // Dispatch based on instance type.
2109  __ Lhu(t2, FieldMemOperand(t1, Map::kInstanceTypeOffset));
2110  __ Jump(BUILTIN_CODE(masm->isolate(), ConstructFunction),
2111  RelocInfo::CODE_TARGET, eq, t2, Operand(JS_FUNCTION_TYPE));
2112 
2113  // Only dispatch to bound functions after checking whether they are
2114  // constructors.
2115  __ Jump(BUILTIN_CODE(masm->isolate(), ConstructBoundFunction),
2116  RelocInfo::CODE_TARGET, eq, t2, Operand(JS_BOUND_FUNCTION_TYPE));
2117 
2118  // Only dispatch to proxies after checking whether they are constructors.
2119  __ Branch(&non_proxy, ne, t2, Operand(JS_PROXY_TYPE));
2120  __ Jump(BUILTIN_CODE(masm->isolate(), ConstructProxy),
2121  RelocInfo::CODE_TARGET);
2122 
2123  // Called Construct on an exotic Object with a [[Construct]] internal method.
2124  __ bind(&non_proxy);
2125  {
2126  // Overwrite the original receiver with the (original) target.
2127  __ Dlsa(kScratchReg, sp, a0, kPointerSizeLog2);
2128  __ Sd(a1, MemOperand(kScratchReg));
2129  // Let the "call_as_constructor_delegate" take care of the rest.
2130  __ LoadNativeContextSlot(Context::CALL_AS_CONSTRUCTOR_DELEGATE_INDEX, a1);
2131  __ Jump(masm->isolate()->builtins()->CallFunction(),
2132  RelocInfo::CODE_TARGET);
2133  }
2134 
2135  // Called Construct on an Object that doesn't have a [[Construct]] internal
2136  // method.
2137  __ bind(&non_constructor);
2138  __ Jump(BUILTIN_CODE(masm->isolate(), ConstructedNonConstructable),
2139  RelocInfo::CODE_TARGET);
2140 }
2141 
2142 void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
2143  // State setup as expected by MacroAssembler::InvokePrologue.
2144  // ----------- S t a t e -------------
2145  // -- a0: actual arguments count
2146  // -- a1: function (passed through to callee)
2147  // -- a2: expected arguments count
2148  // -- a3: new target (passed through to callee)
2149  // -----------------------------------
2150 
2151  Label invoke, dont_adapt_arguments, stack_overflow;
2152 
2153  Label enough, too_few;
2154  __ Branch(&dont_adapt_arguments, eq, a2,
2155  Operand(SharedFunctionInfo::kDontAdaptArgumentsSentinel));
2156  // We use Uless since the number of arguments should always be greater than 0.
2157  __ Branch(&too_few, Uless, a0, Operand(a2));
2158 
2159  { // Enough parameters: actual >= expected.
2160  // a0: actual number of arguments as a smi
2161  // a1: function
2162  // a2: expected number of arguments
2163  // a3: new target (passed through to callee)
2164  __ bind(&enough);
2165  EnterArgumentsAdaptorFrame(masm);
2166  Generate_StackOverflowCheck(masm, a2, a5, kScratchReg, &stack_overflow);
2167 
2168  // Calculate copy start address into a0 and copy end address into a4.
2169  __ SmiScale(a0, a0, kPointerSizeLog2);
2170  __ Daddu(a0, fp, a0);
2171  // Adjust for return address and receiver.
2172  __ Daddu(a0, a0, Operand(2 * kPointerSize));
2173  // Compute copy end address.
2174  __ dsll(a4, a2, kPointerSizeLog2);
2175  __ dsubu(a4, a0, a4);
2176 
2177  // Copy the arguments (including the receiver) to the new stack frame.
2178  // a0: copy start address
2179  // a1: function
2180  // a2: expected number of arguments
2181  // a3: new target (passed through to callee)
2182  // a4: copy end address
2183 
2184  Label copy;
2185  __ bind(&copy);
2186  __ Ld(a5, MemOperand(a0));
2187  __ push(a5);
2188  __ Branch(USE_DELAY_SLOT, &copy, ne, a0, Operand(a4));
2189  __ daddiu(a0, a0, -kPointerSize); // In delay slot.
2190 
2191  __ jmp(&invoke);
2192  }
2193 
2194  { // Too few parameters: Actual < expected.
2195  __ bind(&too_few);
2196  EnterArgumentsAdaptorFrame(masm);
2197  Generate_StackOverflowCheck(masm, a2, a5, kScratchReg, &stack_overflow);
2198 
2199  // Calculate copy start address into a0 and copy end address into a7.
2200  // a0: actual number of arguments as a smi
2201  // a1: function
2202  // a2: expected number of arguments
2203  // a3: new target (passed through to callee)
2204  __ SmiScale(a0, a0, kPointerSizeLog2);
2205  __ Daddu(a0, fp, a0);
2206  // Adjust for return address and receiver.
2207  __ Daddu(a0, a0, Operand(2 * kPointerSize));
2208  // Compute copy end address. Also adjust for return address.
2209  __ Daddu(a7, fp, kPointerSize);
2210 
2211  // Copy the arguments (including the receiver) to the new stack frame.
2212  // a0: copy start address
2213  // a1: function
2214  // a2: expected number of arguments
2215  // a3: new target (passed through to callee)
2216  // a7: copy end address
2217  Label copy;
2218  __ bind(&copy);
2219  __ Ld(a4, MemOperand(a0)); // Adjusted above for return addr and receiver.
2220  __ Dsubu(sp, sp, kPointerSize);
2221  __ Dsubu(a0, a0, kPointerSize);
2222  __ Branch(USE_DELAY_SLOT, &copy, ne, a0, Operand(a7));
2223  __ Sd(a4, MemOperand(sp)); // In the delay slot.
2224 
2225  // Fill the remaining expected arguments with undefined.
2226  // a1: function
2227  // a2: expected number of arguments
2228  // a3: new target (passed through to callee)
2229  __ LoadRoot(a5, RootIndex::kUndefinedValue);
2230  __ dsll(a6, a2, kPointerSizeLog2);
2231  __ Dsubu(a4, fp, Operand(a6));
2232  // Adjust for frame.
2233  __ Dsubu(a4, a4,
2234  Operand(ArgumentsAdaptorFrameConstants::kFixedFrameSizeFromFp +
2235  kPointerSize));
2236 
2237  Label fill;
2238  __ bind(&fill);
2239  __ Dsubu(sp, sp, kPointerSize);
2240  __ Branch(USE_DELAY_SLOT, &fill, ne, sp, Operand(a4));
2241  __ Sd(a5, MemOperand(sp));
2242  }
2243 
2244  // Call the entry point.
2245  __ bind(&invoke);
2246  __ mov(a0, a2);
2247  // a0 : expected number of arguments
2248  // a1 : function (passed through to callee)
2249  // a3: new target (passed through to callee)
2250  static_assert(kJavaScriptCallCodeStartRegister == a2, "ABI mismatch");
2251  __ Ld(a2, FieldMemOperand(a1, JSFunction::kCodeOffset));
2252  __ Daddu(a2, a2, Operand(Code::kHeaderSize - kHeapObjectTag));
2253  __ Call(a2);
2254 
2255  // Store offset of return address for deoptimizer.
2256  masm->isolate()->heap()->SetArgumentsAdaptorDeoptPCOffset(masm->pc_offset());
2257 
2258  // Exit frame and return.
2259  LeaveArgumentsAdaptorFrame(masm);
2260  __ Ret();
2261 
2262  // -------------------------------------------
2263  // Don't adapt arguments.
2264  // -------------------------------------------
2265  __ bind(&dont_adapt_arguments);
2266  static_assert(kJavaScriptCallCodeStartRegister == a2, "ABI mismatch");
2267  __ Ld(a2, FieldMemOperand(a1, JSFunction::kCodeOffset));
2268  __ Daddu(a2, a2, Operand(Code::kHeaderSize - kHeapObjectTag));
2269  __ Jump(a2);
2270 
2271  __ bind(&stack_overflow);
2272  {
2273  FrameScope frame(masm, StackFrame::MANUAL);
2274  __ CallRuntime(Runtime::kThrowStackOverflow);
2275  __ break_(0xCC);
2276  }
2277 }
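// ---------------------------------------------------------------------------
// Illustrative sketch (not part of the original builtins-mips64.cc): stripped
// of the stack-walking details above, the adaptor's job is to present exactly
// 'expected' argument slots to the callee: surplus actual arguments are left
// behind in the adaptor frame, missing ones are filled with undefined. The
// undefined_value parameter is a placeholder, and receiver handling is elided.
// ---------------------------------------------------------------------------
#if 0  // Example only, kept out of the build.
void AdaptArguments(const void* const* actual_args, int actual, int expected,
                    const void** adapted, const void* undefined_value) {
  for (int i = 0; i < expected; ++i) {
    adapted[i] = (i < actual) ? actual_args[i] : undefined_value;  // pad with undefined
  }
}
#endif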
2278 
2279 void Builtins::Generate_WasmCompileLazy(MacroAssembler* masm) {
2280  // The function index was put in t0 by the jump table trampoline.
2281  // Convert to Smi for the runtime call
2282  __ SmiTag(kWasmCompileLazyFuncIndexRegister);
2283  {
2284  HardAbortScope hard_abort(masm); // Avoid calls to Abort.
2285  FrameScope scope(masm, StackFrame::WASM_COMPILE_LAZY);
2286 
2287  // Save all parameter registers (see wasm-linkage.cc). They might be
2288  // overwritten in the runtime call below. We don't have any callee-saved
2289  // registers in wasm, so no need to store anything else.
2290  constexpr RegList gp_regs =
2291  Register::ListOf<a0, a1, a2, a3, a4, a5, a6, a7>();
2292  constexpr RegList fp_regs =
2293  DoubleRegister::ListOf<f2, f4, f6, f8, f10, f12, f14>();
2294  __ MultiPush(gp_regs);
2295  __ MultiPushFPU(fp_regs);
2296 
2297  // Pass the instance and the function index as explicit arguments to the runtime
2298  // function.
2299  __ Push(kWasmInstanceRegister, kWasmCompileLazyFuncIndexRegister);
2300  // Load the correct CEntry builtin from the instance object.
2301  __ Ld(a2, FieldMemOperand(kWasmInstanceRegister,
2302  WasmInstanceObject::kCEntryStubOffset));
2303  // Initialize the JavaScript context with 0. CEntry will use it to
2304  // set the current context on the isolate.
2305  __ Move(kContextRegister, Smi::zero());
2306  __ CallRuntimeWithCEntry(Runtime::kWasmCompileLazy, a2);
2307 
2308  // Restore registers.
2309  __ MultiPopFPU(fp_regs);
2310  __ MultiPop(gp_regs);
2311  }
2312  // Finally, jump to the entrypoint.
2313  __ Jump(v0);
2314 }
2315 
2316 void Builtins::Generate_CEntry(MacroAssembler* masm, int result_size,
2317  SaveFPRegsMode save_doubles, ArgvMode argv_mode,
2318  bool builtin_exit_frame) {
2319  // Called from JavaScript; parameters are on stack as if calling JS function
2320  // a0: number of arguments including receiver
2321  // a1: pointer to builtin function
2322  // fp: frame pointer (restored after C call)
2323  // sp: stack pointer (restored as callee's sp after C call)
2324  // cp: current context (C callee-saved)
2325  //
2326  // If argv_mode == kArgvInRegister:
2327  // a2: pointer to the first argument
2328 
2329  if (argv_mode == kArgvInRegister) {
2330  // Move argv into the correct register.
2331  __ mov(s1, a2);
2332  } else {
2333  // Compute the argv pointer in a callee-saved register.
2334  __ Dlsa(s1, sp, a0, kPointerSizeLog2);
2335  __ Dsubu(s1, s1, kPointerSize);
2336  }
2337 
2338  // Enter the exit frame that transitions from JavaScript to C++.
2339  FrameScope scope(masm, StackFrame::MANUAL);
2340  __ EnterExitFrame(
2341  save_doubles == kSaveFPRegs, 0,
2342  builtin_exit_frame ? StackFrame::BUILTIN_EXIT : StackFrame::EXIT);
2343 
2344  // s0: number of arguments including receiver (C callee-saved)
2345  // s1: pointer to first argument (C callee-saved)
2346  // s2: pointer to builtin function (C callee-saved)
2347 
2348  // Prepare arguments for C routine.
2349  // a0 = argc
2350  __ mov(s0, a0);
2351  __ mov(s2, a1);
2352 
2353  // We are calling compiled C/C++ code. a0 and a1 hold our two arguments. We
2354  // also need to reserve the 4 argument slots on the stack.
2355 
2356  __ AssertStackIsAligned();
2357 
2358  // a0 = argc, a1 = argv, a2 = isolate
2359  __ li(a2, ExternalReference::isolate_address(masm->isolate()));
2360  __ mov(a1, s1);
2361 
2362  // To let the GC traverse the return address of the exit frames, we need to
2363  // know where the return address is. The CEntry is unmovable, so
2364  // we can store the address on the stack to be able to find it again and
2365  // we never have to restore it, because it will not change.
2366  {
2367  Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm);
2368  int kNumInstructionsToJump = 4;
2369  Label find_ra;
2370  // Adjust the value in ra to point to the correct return location, 2nd
2371  // instruction past the real call into C code (the jalr(t9)), and push it.
2372  // This is the return address of the exit frame.
2373  if (kArchVariant >= kMips64r6) {
2374  __ addiupc(ra, kNumInstructionsToJump + 1);
2375  } else {
2376  // This no-op-and-link sequence saves PC + 8 in the ra register on pre-r6 MIPS.
2377  __ nal(); // nal has branch delay slot.
2378  __ Daddu(ra, ra, kNumInstructionsToJump * kInstrSize);
2379  }
2380  __ bind(&find_ra);
2381 
2382  // This spot was reserved in EnterExitFrame.
2383  __ Sd(ra, MemOperand(sp));
2384  // Stack space reservation moved to the branch delay slot below.
2385  // Stack is still aligned.
2386 
2387  // Call the C routine.
2388  __ mov(t9, s2); // Function pointer to t9 to conform to ABI for PIC.
2389  __ jalr(t9);
2390  // Set up sp in the delay slot.
2391  __ daddiu(sp, sp, -kCArgsSlotsSize);
2392  // Make sure the stored 'ra' points to this position.
2393  DCHECK_EQ(kNumInstructionsToJump,
2394  masm->InstructionsGeneratedSince(&find_ra));
2395  }
2396 
2397  // Result returned in v0 or v1:v0 - do not destroy these registers!
2398 
2399  // Check result for exception sentinel.
2400  Label exception_returned;
2401  __ LoadRoot(a4, RootIndex::kException);
2402  __ Branch(&exception_returned, eq, a4, Operand(v0));
2403 
2404  // Check that there is no pending exception, otherwise we
2405  // should have returned the exception sentinel.
2406  if (FLAG_debug_code) {
2407  Label okay;
2408  ExternalReference pending_exception_address = ExternalReference::Create(
2409  IsolateAddressId::kPendingExceptionAddress, masm->isolate());
2410  __ li(a2, pending_exception_address);
2411  __ Ld(a2, MemOperand(a2));
2412  __ LoadRoot(a4, RootIndex::kTheHoleValue);
2413  // Cannot use Check here, as it attempts to generate a call into the runtime.
2414  __ Branch(&okay, eq, a4, Operand(a2));
2415  __ stop("Unexpected pending exception");
2416  __ bind(&okay);
2417  }
2418 
2419  // Exit C frame and return.
2420  // v0:v1: result
2421  // sp: stack pointer
2422  // fp: frame pointer
2423  Register argc = argv_mode == kArgvInRegister
2424  // We don't want to pop arguments so set argc to no_reg.
2425  ? no_reg
2426  // s0: still holds argc (callee-saved).
2427  : s0;
2428  __ LeaveExitFrame(save_doubles == kSaveFPRegs, argc, EMIT_RETURN);
2429 
2430  // Handling of exception.
2431  __ bind(&exception_returned);
2432 
2433  ExternalReference pending_handler_context_address = ExternalReference::Create(
2434  IsolateAddressId::kPendingHandlerContextAddress, masm->isolate());
2435  ExternalReference pending_handler_entrypoint_address =
2436  ExternalReference::Create(
2437  IsolateAddressId::kPendingHandlerEntrypointAddress, masm->isolate());
2438  ExternalReference pending_handler_fp_address = ExternalReference::Create(
2439  IsolateAddressId::kPendingHandlerFPAddress, masm->isolate());
2440  ExternalReference pending_handler_sp_address = ExternalReference::Create(
2441  IsolateAddressId::kPendingHandlerSPAddress, masm->isolate());
2442 
2443  // Ask the runtime for help to determine the handler. This will set v0 to
2444  // contain the current pending exception, don't clobber it.
2445  ExternalReference find_handler =
2446  ExternalReference::Create(Runtime::kUnwindAndFindExceptionHandler);
2447  {
2448  FrameScope scope(masm, StackFrame::MANUAL);
2449  __ PrepareCallCFunction(3, 0, a0);
2450  __ mov(a0, zero_reg);
2451  __ mov(a1, zero_reg);
2452  __ li(a2, ExternalReference::isolate_address(masm->isolate()));
2453  __ CallCFunction(find_handler, 3);
2454  }
2455 
2456  // Retrieve the handler context, SP and FP.
2457  __ li(cp, pending_handler_context_address);
2458  __ Ld(cp, MemOperand(cp));
2459  __ li(sp, pending_handler_sp_address);
2460  __ Ld(sp, MemOperand(sp));
2461  __ li(fp, pending_handler_fp_address);
2462  __ Ld(fp, MemOperand(fp));
2463 
2464  // If the handler is a JS frame, restore the context to the frame. Note that
2465  // the context will be set to (cp == 0) for non-JS frames.
2466  Label zero;
2467  __ Branch(&zero, eq, cp, Operand(zero_reg));
2468  __ Sd(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
2469  __ bind(&zero);
2470 
2471  // Reset the masking register. This is done independently of the underlying
2472  // feature flag {FLAG_untrusted_code_mitigations} to make the snapshot work
2473  // with both configurations. It is safe to always do this, because the
2474  // underlying register is caller-saved and can be arbitrarily clobbered.
2475  __ ResetSpeculationPoisonRegister();
2476 
2477  // Compute the handler entry address and jump to it.
2478  __ li(t9, pending_handler_entrypoint_address);
2479  __ Ld(t9, MemOperand(t9));
2480  __ Jump(t9);
2481 }
2482 
2483 void Builtins::Generate_DoubleToI(MacroAssembler* masm) {
2484  Label out_of_range, only_low, negate, done;
2485  Register result_reg = t0;
2486 
2487  Register scratch = GetRegisterThatIsNotOneOf(result_reg);
2488  Register scratch2 = GetRegisterThatIsNotOneOf(result_reg, scratch);
2489  Register scratch3 = GetRegisterThatIsNotOneOf(result_reg, scratch, scratch2);
2490  DoubleRegister double_scratch = kScratchDoubleReg;
2491 
2492  // Account for saved regs.
2493  const int kArgumentOffset = 4 * kPointerSize;
2494 
2495  __ Push(result_reg);
2496  __ Push(scratch, scratch2, scratch3);
2497 
2498  // Load double input.
2499  __ Ldc1(double_scratch, MemOperand(sp, kArgumentOffset));
2500 
2501  // Clear cumulative exception flags and save the FCSR.
2502  __ cfc1(scratch2, FCSR);
2503  __ ctc1(zero_reg, FCSR);
2504 
2505  // Try a conversion to a signed integer.
2506  __ Trunc_w_d(double_scratch, double_scratch);
2507  // Move the converted value into the result register.
2508  __ mfc1(scratch3, double_scratch);
2509 
2510  // Retrieve and restore the FCSR.
2511  __ cfc1(scratch, FCSR);
2512  __ ctc1(scratch2, FCSR);
2513 
2514  // Check for overflow and NaNs.
2515  __ And(
2516  scratch, scratch,
2517  kFCSROverflowFlagMask | kFCSRUnderflowFlagMask | kFCSRInvalidOpFlagMask);
2518  // If we had no exceptions then set result_reg and we are done.
2519  Label error;
2520  __ Branch(&error, ne, scratch, Operand(zero_reg));
2521  __ Move(result_reg, scratch3);
2522  __ Branch(&done);
2523  __ bind(&error);
2524 
2525  // Load the double value and perform a manual truncation.
2526  Register input_high = scratch2;
2527  Register input_low = scratch3;
2528 
2529  __ Lw(input_low, MemOperand(sp, kArgumentOffset + Register::kMantissaOffset));
2530  __ Lw(input_high,
2531  MemOperand(sp, kArgumentOffset + Register::kExponentOffset));
2532 
2533  Label normal_exponent, restore_sign;
2534  // Extract the biased exponent in result.
2535  __ Ext(result_reg, input_high, HeapNumber::kExponentShift,
2536  HeapNumber::kExponentBits);
2537 
2538  // Check for Infinity and NaNs, which should return 0.
2539  __ Subu(scratch, result_reg, HeapNumber::kExponentMask);
2540  __ Movz(result_reg, zero_reg, scratch);
2541  __ Branch(&done, eq, scratch, Operand(zero_reg));
2542 
2543  // Express exponent as delta to (number of mantissa bits + 31).
2544  __ Subu(result_reg, result_reg,
2545  Operand(HeapNumber::kExponentBias + HeapNumber::kMantissaBits + 31));
2546 
2547  // If the delta is strictly positive, all bits would be shifted away,
2548  // which means that we can return 0.
2549  __ Branch(&normal_exponent, le, result_reg, Operand(zero_reg));
2550  __ mov(result_reg, zero_reg);
2551  __ Branch(&done);
2552 
2553  __ bind(&normal_exponent);
2554  const int kShiftBase = HeapNumber::kNonMantissaBitsInTopWord - 1;
2555  // Calculate shift.
2556  __ Addu(scratch, result_reg, Operand(kShiftBase + HeapNumber::kMantissaBits));
2557 
2558  // Save the sign.
2559  Register sign = result_reg;
2560  result_reg = no_reg;
2561  __ And(sign, input_high, Operand(HeapNumber::kSignMask));
2562 
2563  // On ARM shifts > 31 bits are valid and will result in zero. On MIPS we need
2564  // to check for this specific case.
2565  Label high_shift_needed, high_shift_done;
2566  __ Branch(&high_shift_needed, lt, scratch, Operand(32));
2567  __ mov(input_high, zero_reg);
2568  __ Branch(&high_shift_done);
2569  __ bind(&high_shift_needed);
2570 
2571  // Set the implicit 1 before the mantissa part in input_high.
2572  __ Or(input_high, input_high,
2573  Operand(1 << HeapNumber::kMantissaBitsInTopWord));
2574  // Shift the mantissa bits to the correct position.
2575  // We don't need to clear non-mantissa bits as they will be shifted away.
2576  // If they weren't, it would mean that the answer is in the 32bit range.
2577  __ sllv(input_high, input_high, scratch);
2578 
2579  __ bind(&high_shift_done);
2580 
2581  // Replace the shifted bits with bits from the lower mantissa word.
2582  Label pos_shift, shift_done;
2583  __ li(kScratchReg, 32);
2584  __ subu(scratch, kScratchReg, scratch);
2585  __ Branch(&pos_shift, ge, scratch, Operand(zero_reg));
2586 
2587  // Negate scratch.
2588  __ Subu(scratch, zero_reg, scratch);
2589  __ sllv(input_low, input_low, scratch);
2590  __ Branch(&shift_done);
2591 
2592  __ bind(&pos_shift);
2593  __ srlv(input_low, input_low, scratch);
2594 
2595  __ bind(&shift_done);
2596  __ Or(input_high, input_high, Operand(input_low));
2597  // Restore sign if necessary.
2598  __ mov(scratch, sign);
2599  result_reg = sign;
2600  sign = no_reg;
2601  __ Subu(result_reg, zero_reg, input_high);
2602  __ Movz(result_reg, input_high, scratch);
2603 
2604  __ bind(&done);
2605 
2606  __ Sd(result_reg, MemOperand(sp, kArgumentOffset));
2607  __ Pop(scratch, scratch2, scratch3);
2608  __ Pop(result_reg);
2609  __ Ret();
2610 }
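// ---------------------------------------------------------------------------
// Illustrative sketch (not part of the original builtins-mips64.cc): the slow
// path above reconstructs the low 32 bits of the truncated value directly
// from the IEEE-754 bit pattern whenever the FPU conversion raised an
// overflow/invalid flag, which is what ECMAScript ToInt32 requires for values
// outside the int32 range. A portable C++ version of that bit manipulation
// (two's-complement wrap-around assumed):
// ---------------------------------------------------------------------------
#if 0  // Example only, kept out of the build.
#include <cstdint>
#include <cstring>

int32_t DoubleToInt32SlowPath(double input) {
  uint64_t bits;
  std::memcpy(&bits, &input, sizeof(bits));
  const uint32_t biased_exponent = static_cast<uint32_t>((bits >> 52) & 0x7FF);
  if (biased_exponent == 0x7FF) return 0;  // Infinity and NaN map to 0.
  const bool negative = (bits >> 63) != 0;
  // value = mantissa * 2^exponent, with the implicit leading bit added back.
  const uint64_t mantissa =
      (bits & ((uint64_t{1} << 52) - 1)) | (uint64_t{1} << 52);
  const int exponent = static_cast<int>(biased_exponent) - 1075;
  uint32_t magnitude;
  if (exponent >= 32) {
    magnitude = 0;                                             // low 32 bits all shifted out
  } else if (exponent >= 0) {
    magnitude = static_cast<uint32_t>(mantissa << exponent);   // keep the low 32 bits
  } else if (exponent > -53) {
    magnitude = static_cast<uint32_t>(mantissa >> -exponent);  // drop the fraction
  } else {
    magnitude = 0;                                             // |input| < 1 (incl. subnormals)
  }
  const uint32_t result = negative ? 0u - magnitude : magnitude;
  return static_cast<int32_t>(result);
}
#endif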
2611 
2612 void Builtins::Generate_MathPowInternal(MacroAssembler* masm) {
2613  const Register exponent = a2;
2614  const DoubleRegister double_base = f2;
2615  const DoubleRegister double_exponent = f4;
2616  const DoubleRegister double_result = f0;
2617  const DoubleRegister double_scratch = f6;
2618  const FPURegister single_scratch = f8;
2619  const Register scratch = t1;
2620  const Register scratch2 = a7;
2621 
2622  Label call_runtime, done, int_exponent;
2623 
2624  Label int_exponent_convert;
2625  // Detect integer exponents stored as double.
2626  __ EmitFPUTruncate(kRoundToMinusInf, scratch, double_exponent, kScratchReg,
2627  double_scratch, scratch2, kCheckForInexactConversion);
2628  // scratch2 == 0 means there was no conversion error.
2629  __ Branch(&int_exponent_convert, eq, scratch2, Operand(zero_reg));
2630 
2631  __ push(ra);
2632  {
2633  AllowExternalCallThatCantCauseGC scope(masm);
2634  __ PrepareCallCFunction(0, 2, scratch2);
2635  __ MovToFloatParameters(double_base, double_exponent);
2636  __ CallCFunction(ExternalReference::power_double_double_function(), 0, 2);
2637  }
2638  __ pop(ra);
2639  __ MovFromFloatResult(double_result);
2640  __ jmp(&done);
2641 
2642  __ bind(&int_exponent_convert);
2643 
2644  // Calculate power with integer exponent.
2645  __ bind(&int_exponent);
2646 
2647  // Get two copies of exponent in the registers scratch and exponent.
2648  // Exponent has previously been stored into scratch as untagged integer.
2649  __ mov(exponent, scratch);
2650 
2651  __ mov_d(double_scratch, double_base); // Back up base.
2652  __ Move(double_result, 1.0);
2653 
2654  // Get absolute value of exponent.
2655  Label positive_exponent, bail_out;
2656  __ Branch(&positive_exponent, ge, scratch, Operand(zero_reg));
2657  __ Dsubu(scratch, zero_reg, scratch);
2658  // Check whether Dsubu overflowed and produced a negative result
2659  // (this happens only when the input is MIN_INT).
2660  __ Branch(&bail_out, gt, zero_reg, Operand(scratch));
2661  __ bind(&positive_exponent);
2662  __ Assert(ge, AbortReason::kUnexpectedNegativeValue, scratch,
2663  Operand(zero_reg));
2664 
2665  Label while_true, no_carry, loop_end;
2666  __ bind(&while_true);
2667 
2668  __ And(scratch2, scratch, 1);
2669 
2670  __ Branch(&no_carry, eq, scratch2, Operand(zero_reg));
2671  __ mul_d(double_result, double_result, double_scratch);
2672  __ bind(&no_carry);
2673 
2674  __ dsra(scratch, scratch, 1);
2675 
2676  __ Branch(&loop_end, eq, scratch, Operand(zero_reg));
2677  __ mul_d(double_scratch, double_scratch, double_scratch);
2678 
2679  __ Branch(&while_true);
2680 
2681  __ bind(&loop_end);
2682 
2683  __ Branch(&done, ge, exponent, Operand(zero_reg));
2684  __ Move(double_scratch, 1.0);
2685  __ div_d(double_result, double_scratch, double_result);
2686  // Test whether result is zero. Bail out to check for subnormal result.
2687  // Due to subnormals, x^-y == (1/x)^y does not hold in all cases.
2688  __ CompareF64(EQ, double_result, kDoubleRegZero);
2689  __ BranchFalseShortF(&done);
2690 
2691  // double_exponent may not contain the exponent value if the input was a
2692  // smi. We set it to the exponent value before bailing out.
2693  __ bind(&bail_out);
2694  __ mtc1(exponent, single_scratch);
2695  __ cvt_d_w(double_exponent, single_scratch);
2696 
2697  // Returning or bailing out.
2698  __ push(ra);
2699  {
2700  AllowExternalCallThatCantCauseGC scope(masm);
2701  __ PrepareCallCFunction(0, 2, scratch);
2702  __ MovToFloatParameters(double_base, double_exponent);
2703  __ CallCFunction(ExternalReference::power_double_double_function(), 0, 2);
2704  }
2705  __ pop(ra);
2706  __ MovFromFloatResult(double_result);
2707 
2708  __ bind(&done);
2709  __ Ret();
2710 }
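// ---------------------------------------------------------------------------
// Illustrative sketch (not part of the original builtins-mips64.cc): the
// integer-exponent loop above is exponentiation by squaring on the absolute
// value of the exponent, followed by a 1/result correction for negative
// exponents. The builtin additionally bails out to the C runtime for the
// MIN_INT exponent and for subnormal reciprocals; that is omitted here.
// ---------------------------------------------------------------------------
#if 0  // Example only, kept out of the build.
double PowIntExponent(double base, int exponent) {
  unsigned n = exponent < 0 ? 0u - static_cast<unsigned>(exponent)
                            : static_cast<unsigned>(exponent);
  double result = 1.0;
  double scratch = base;             // mirrors double_scratch backing up the base
  while (n != 0) {
    if (n & 1u) result *= scratch;   // multiply in the current exponent bit
    n >>= 1;
    if (n != 0) scratch *= scratch;  // square for the next bit
  }
  return exponent < 0 ? 1.0 / result : result;
}
#endif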
2711 
2712 namespace {
2713 
2714 void GenerateInternalArrayConstructorCase(MacroAssembler* masm,
2715  ElementsKind kind) {
2716  // Load undefined into the allocation site parameter as required by
2717  // ArrayNArgumentsConstructor.
2718  __ LoadRoot(kJavaScriptCallExtraArg1Register, RootIndex::kUndefinedValue);
2719 
2720  __ Jump(CodeFactory::InternalArrayNoArgumentConstructor(masm->isolate(), kind)
2721  .code(),
2722  RelocInfo::CODE_TARGET, lo, a0, Operand(1));
2723 
2724  __ Jump(BUILTIN_CODE(masm->isolate(), ArrayNArgumentsConstructor),
2725  RelocInfo::CODE_TARGET, hi, a0, Operand(1));
2726 
2727  if (IsFastPackedElementsKind(kind)) {
2728  // We might need to create a holey array; to decide,
2729  // look at the first argument.
2730  __ Ld(kScratchReg, MemOperand(sp, 0));
2731 
2732  __ Jump(CodeFactory::InternalArraySingleArgumentConstructor(
2733  masm->isolate(), GetHoleyElementsKind(kind))
2734  .code(),
2735  RelocInfo::CODE_TARGET, ne, kScratchReg, Operand(zero_reg));
2736  }
2737 
2738  __ Jump(
2739  CodeFactory::InternalArraySingleArgumentConstructor(masm->isolate(), kind)
2740  .code(),
2741  RelocInfo::CODE_TARGET);
2742 }
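// ---------------------------------------------------------------------------
// Illustrative sketch (not part of the original builtins-mips64.cc): the
// conditional Jump sequence above dispatches on the argument count, and the
// packed-kind case is upgraded to a holey kind when the single argument (the
// requested length) is non-zero. Written out as a switch-free helper for
// readability; the enumerators are placeholders, not the real CodeFactory
// entry points.
// ---------------------------------------------------------------------------
#if 0  // Example only, kept out of the build.
enum class ArrayStub {
  kNoArgument, kSingleArgument, kSingleArgumentHoley, kNArguments
};

ArrayStub DispatchInternalArrayConstructor(int argc, long first_arg,
                                           bool packed_kind) {
  if (argc < 1) return ArrayStub::kNoArgument;   // Jump ... lo, a0, Operand(1)
  if (argc > 1) return ArrayStub::kNArguments;   // Jump ... hi, a0, Operand(1)
  if (packed_kind && first_arg != 0) return ArrayStub::kSingleArgumentHoley;
  return ArrayStub::kSingleArgument;
}
#endif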
2743 
2744 } // namespace
2745 
2746 void Builtins::Generate_InternalArrayConstructorImpl(MacroAssembler* masm) {
2747  // ----------- S t a t e -------------
2748  // -- a0 : argc
2749  // -- a1 : constructor
2750  // -- ra    : return address
2751  // -- sp[0] : last argument
2752  // -----------------------------------
2753 
2754  if (FLAG_debug_code) {
2755  // The array construct code is only set for the global and native
2756  // builtin Array functions, which always have maps.
2757 
2758  // Initial map for the builtin Array function should be a map.
2759  __ Ld(a3, FieldMemOperand(a1, JSFunction::kPrototypeOrInitialMapOffset));
2760  // Will both indicate a nullptr and a Smi.
2761  __ SmiTst(a3, kScratchReg);
2762  __ Assert(ne, AbortReason::kUnexpectedInitialMapForArrayFunction,
2763  kScratchReg, Operand(zero_reg));
2764  __ GetObjectType(a3, a3, a4);
2765  __ Assert(eq, AbortReason::kUnexpectedInitialMapForArrayFunction, a4,
2766  Operand(MAP_TYPE));
2767  }
2768 
2769  // Figure out the right elements kind.
2770  __ Ld(a3, FieldMemOperand(a1, JSFunction::kPrototypeOrInitialMapOffset));
2771 
2772  // Load the map's "bit field 2" into a3. We only need the first byte,
2773  // but the following bit field extraction takes care of that anyway.
2774  __ Lbu(a3, FieldMemOperand(a3, Map::kBitField2Offset));
2775  // Retrieve elements_kind from bit field 2.
2776  __ DecodeField<Map::ElementsKindBits>(a3);
2777 
2778  if (FLAG_debug_code) {
2779  Label done;
2780  __ Branch(&done, eq, a3, Operand(PACKED_ELEMENTS));
2781  __ Assert(
2782  eq,
2783  AbortReason::kInvalidElementsKindForInternalArrayOrInternalPackedArray,
2784  a3, Operand(HOLEY_ELEMENTS));
2785  __ bind(&done);
2786  }
2787 
2788  Label fast_elements_case;
2789  __ Branch(&fast_elements_case, eq, a3, Operand(PACKED_ELEMENTS));
2790  GenerateInternalArrayConstructorCase(masm, HOLEY_ELEMENTS);
2791 
2792  __ bind(&fast_elements_case);
2793  GenerateInternalArrayConstructorCase(masm, PACKED_ELEMENTS);
2794 }
2795 
2796 #undef __
2797 
2798 } // namespace internal
2799 } // namespace v8
2800 
2801 #endif // V8_TARGET_ARCH_MIPS64