V8 API Reference, 7.2.502.16 (for Deno 0.2.4)
builtins-ppc.cc
1 // Copyright 2014 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
4 
5 #if V8_TARGET_ARCH_PPC
6 
7 #include "src/assembler-inl.h"
8 #include "src/code-factory.h"
9 #include "src/code-stubs.h"
10 #include "src/counters.h"
11 #include "src/debug/debug.h"
12 #include "src/deoptimizer.h"
13 #include "src/frame-constants.h"
14 #include "src/frames.h"
15 #include "src/objects/js-generator.h"
16 #include "src/objects/smi.h"
17 #include "src/register-configuration.h"
18 #include "src/runtime/runtime.h"
19 #include "src/wasm/wasm-objects.h"
20 
21 namespace v8 {
22 namespace internal {
23 
24 #define __ ACCESS_MASM(masm)
25 
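// Register conventions used throughout this file (summarized from the state
// comments below): r3 carries the JavaScript argument count, r4 the target
// JSFunction, r6 the new target, cp the context, and r5
// (kJavaScriptCallCodeStartRegister) the code entry point when tail-calling
// into generated code.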
26 void Builtins::Generate_Adaptor(MacroAssembler* masm, Address address,
27  ExitFrameType exit_frame_type) {
28  __ Move(kJavaScriptCallExtraArg1Register, ExternalReference::Create(address));
29  if (exit_frame_type == BUILTIN_EXIT) {
30  __ Jump(BUILTIN_CODE(masm->isolate(), AdaptorWithBuiltinExitFrame),
31  RelocInfo::CODE_TARGET);
32  } else {
33  DCHECK(exit_frame_type == EXIT);
34  __ Jump(BUILTIN_CODE(masm->isolate(), AdaptorWithExitFrame),
35  RelocInfo::CODE_TARGET);
36  }
37 }
38 
39 void Builtins::Generate_InternalArrayConstructor(MacroAssembler* masm) {
40  // ----------- S t a t e -------------
41  // -- r3 : number of arguments
42  // -- lr : return address
43  // -- sp[...]: constructor arguments
44  // -----------------------------------
45  Label generic_array_code, one_or_more_arguments, two_or_more_arguments;
46 
47  if (FLAG_debug_code) {
48  // The initial map for the builtin InternalArray function should be a map.
49  __ LoadP(r5, FieldMemOperand(r4, JSFunction::kPrototypeOrInitialMapOffset));
50  __ TestIfSmi(r5, r0);
51  __ Assert(ne, AbortReason::kUnexpectedInitialMapForInternalArrayFunction,
52  cr0);
53  __ CompareObjectType(r5, r6, r7, MAP_TYPE);
54  __ Assert(eq, AbortReason::kUnexpectedInitialMapForInternalArrayFunction);
55  }
56 
57  // Run the native code for the InternalArray function called as a normal
58  // function.
59  __ Jump(BUILTIN_CODE(masm->isolate(), InternalArrayConstructorImpl),
60  RelocInfo::CODE_TARGET);
61 }
62 
63 static void GenerateTailCallToReturnedCode(MacroAssembler* masm,
64  Runtime::FunctionId function_id) {
65  // ----------- S t a t e -------------
66  // -- r3 : argument count (preserved for callee)
67  // -- r4 : target function (preserved for callee)
68  // -- r6 : new target (preserved for callee)
69  // -----------------------------------
70  {
71  FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
72  // Push the number of arguments to the callee.
73  // Push a copy of the target function and the new target.
74  // Push function as parameter to the runtime call.
75  __ SmiTag(r3);
76  __ Push(r3, r4, r6, r4);
77 
78  __ CallRuntime(function_id, 1);
79  __ mr(r5, r3);
80 
81  // Restore target function and new target.
82  __ Pop(r3, r4, r6);
83  __ SmiUntag(r3);
84  }
85  static_assert(kJavaScriptCallCodeStartRegister == r5, "ABI mismatch");
86  __ addi(r5, r5, Operand(Code::kHeaderSize - kHeapObjectTag));
87  __ JumpToJSEntry(r5);
88 }
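// In effect (a sketch): the runtime function returns a Code object in r3; it
// is moved into r5 and entered at its first instruction, i.e.
//   r5 = code + Code::kHeaderSize - kHeapObjectTag;  goto *r5;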
89 
90 namespace {
91 
92 void Generate_JSBuiltinsConstructStubHelper(MacroAssembler* masm) {
93  Label post_instantiation_deopt_entry;
94  // ----------- S t a t e -------------
95  // -- r3 : number of arguments
96  // -- r4 : constructor function
97  // -- r6 : new target
98  // -- cp : context
99  // -- lr : return address
100  // -- sp[...]: constructor arguments
101  // -----------------------------------
102 
103  // Enter a construct frame.
104  {
105  FrameAndConstantPoolScope scope(masm, StackFrame::CONSTRUCT);
106 
107  // Preserve the incoming parameters on the stack.
108 
109  __ SmiTag(r3);
110  __ Push(cp, r3);
111  __ SmiUntag(r3, SetRC);
112  // The receiver for the builtin/api call.
113  __ PushRoot(RootIndex::kTheHoleValue);
114  // Set up pointer to last argument.
115  __ addi(r7, fp, Operand(StandardFrameConstants::kCallerSPOffset));
116 
117  // Copy arguments and receiver to the expression stack.
118 
119  Label loop, no_args;
120  // ----------- S t a t e -------------
121  // -- r3: number of arguments (untagged)
122  // -- r4: constructor function
123  // -- r6: new target
124  // -- r7: pointer to last argument
125  // -- cr0: condition indicating whether r3 is zero
126  // -- sp[0*kPointerSize]: the hole (receiver)
127  // -- sp[1*kPointerSize]: number of arguments (tagged)
128  // -- sp[2*kPointerSize]: context
129  // -----------------------------------
130  __ beq(&no_args, cr0);
131  __ ShiftLeftImm(ip, r3, Operand(kPointerSizeLog2));
132  __ sub(sp, sp, ip);
133  __ mtctr(r3);
134  __ bind(&loop);
135  __ subi(ip, ip, Operand(kPointerSize));
136  __ LoadPX(r0, MemOperand(r7, ip));
137  __ StorePX(r0, MemOperand(sp, ip));
138  __ bdnz(&loop);
139  __ bind(&no_args);
140 
141  // Call the function.
142  // r3: number of arguments (untagged)
143  // r4: constructor function
144  // r6: new target
145  {
146  ConstantPoolUnavailableScope constant_pool_unavailable(masm);
147  ParameterCount actual(r3);
148  __ InvokeFunction(r4, r6, actual, CALL_FUNCTION);
149  }
150 
151  // Restore context from the frame.
152  __ LoadP(cp, MemOperand(fp, ConstructFrameConstants::kContextOffset));
153  // Restore smi-tagged arguments count from the frame.
154  __ LoadP(r4, MemOperand(fp, ConstructFrameConstants::kLengthOffset));
155 
156  // Leave construct frame.
157  }
158  // Remove caller arguments from the stack and return.
159  STATIC_ASSERT(kSmiTagSize == 1 && kSmiTag == 0);
160 
161  __ SmiToPtrArrayOffset(r4, r4);
162  __ add(sp, sp, r4);
163  __ addi(sp, sp, Operand(kPointerSize));
164  __ blr();
165 }
166 
167 void Generate_StackOverflowCheck(MacroAssembler* masm, Register num_args,
168  Register scratch, Label* stack_overflow) {
169  // Check the stack for overflow. We are not trying to catch
170  // interruptions (e.g. debug break and preemption) here, so the "real stack
171  // limit" is checked.
172  __ LoadRoot(scratch, RootIndex::kRealStackLimit);
173  // Make scratch the space we have left. The stack might already be overflowed
174  // here which will cause scratch to become negative.
175  __ sub(scratch, sp, scratch);
176  // Check if the arguments will overflow the stack.
177  __ ShiftLeftImm(r0, num_args, Operand(kPointerSizeLog2));
178  __ cmp(scratch, r0);
179  __ ble(stack_overflow); // Signed comparison.
180 }
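// A rough sketch of the check above (signed arithmetic, interrupts ignored):
//   if (sp - real_stack_limit <= num_args * kPointerSize) goto stack_overflow;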
181 
182 } // namespace
183 
184 // The construct stub for ES5 constructor functions and ES6 class constructors.
185 void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) {
186  // ----------- S t a t e -------------
187  // -- r3: number of arguments (untagged)
188  // -- r4: constructor function
189  // -- r6: new target
190  // -- cp: context
191  // -- lr: return address
192  // -- sp[...]: constructor arguments
193  // -----------------------------------
194 
195  // Enter a construct frame.
196  {
197  FrameAndConstantPoolScope scope(masm, StackFrame::CONSTRUCT);
198  Label post_instantiation_deopt_entry, not_create_implicit_receiver;
199 
200  // Preserve the incoming parameters on the stack.
201  __ SmiTag(r3);
202  __ Push(cp, r3, r4);
203  __ PushRoot(RootIndex::kUndefinedValue);
204  __ Push(r6);
205 
206  // ----------- S t a t e -------------
207  // -- sp[0*kPointerSize]: new target
208  // -- sp[1*kPointerSize]: padding
209  // -- r4 and sp[2*kPointerSize]: constructor function
210  // -- sp[3*kPointerSize]: number of arguments (tagged)
211  // -- sp[4*kPointerSize]: context
212  // -----------------------------------
213 
214  __ LoadP(r7, FieldMemOperand(r4, JSFunction::kSharedFunctionInfoOffset));
215  __ lwz(r7, FieldMemOperand(r7, SharedFunctionInfo::kFlagsOffset));
216  __ TestBitMask(r7, SharedFunctionInfo::IsDerivedConstructorBit::kMask, r0);
217  __ bne(&not_create_implicit_receiver, cr0);
218 
219  // If not derived class constructor: Allocate the new receiver object.
220  __ IncrementCounter(masm->isolate()->counters()->constructed_objects(), 1,
221  r7, r8);
222  __ Call(BUILTIN_CODE(masm->isolate(), FastNewObject),
223  RelocInfo::CODE_TARGET);
224  __ b(&post_instantiation_deopt_entry);
225 
226  // Else: use TheHoleValue as receiver for constructor call
227  __ bind(&not_create_implicit_receiver);
228  __ LoadRoot(r3, RootIndex::kTheHoleValue);
229 
230  // ----------- S t a t e -------------
231  // -- r3: receiver
232  // -- Slot 4 / sp[0*kPointerSize]: new target
233  // -- Slot 3 / sp[1*kPointerSize]: padding
234  // -- Slot 2 / sp[2*kPointerSize]: constructor function
235  // -- Slot 1 / sp[3*kPointerSize]: number of arguments (tagged)
236  // -- Slot 0 / sp[4*kPointerSize]: context
237  // -----------------------------------
238  // Deoptimizer enters here.
239  masm->isolate()->heap()->SetConstructStubCreateDeoptPCOffset(
240  masm->pc_offset());
241  __ bind(&post_instantiation_deopt_entry);
242 
243  // Restore new target.
244  __ Pop(r6);
245  // Push the allocated receiver to the stack. We need two copies
246  // because we may have to return the original one and the calling
247  // conventions dictate that the called function pops the receiver.
248  __ Push(r3, r3);
249 
250  // ----------- S t a t e -------------
251  // -- r6: new target
252  // -- sp[0*kPointerSize]: implicit receiver
253  // -- sp[1*kPointerSize]: implicit receiver
254  // -- sp[2*kPointerSize]: padding
255  // -- sp[3*kPointerSize]: constructor function
256  // -- sp[4*kPointerSize]: number of arguments (tagged)
257  // -- sp[5*kPointerSize]: context
258  // -----------------------------------
259 
260  // Restore constructor function and argument count.
261  __ LoadP(r4, MemOperand(fp, ConstructFrameConstants::kConstructorOffset));
262  __ LoadP(r3, MemOperand(fp, ConstructFrameConstants::kLengthOffset));
263  __ SmiUntag(r3);
264 
265  // Set up pointer to last argument.
266  __ addi(r7, fp, Operand(StandardFrameConstants::kCallerSPOffset));
267 
268  Label enough_stack_space, stack_overflow;
269  Generate_StackOverflowCheck(masm, r3, r8, &stack_overflow);
270  __ b(&enough_stack_space);
271 
272  __ bind(&stack_overflow);
273  // Restore the context from the frame.
274  __ LoadP(cp, MemOperand(fp, ConstructFrameConstants::kContextOffset));
275  __ CallRuntime(Runtime::kThrowStackOverflow);
276  // Unreachable code.
277  __ bkpt(0);
278 
279  __ bind(&enough_stack_space);
280 
281  // Copy arguments and receiver to the expression stack.
282  Label loop, no_args;
283  // ----------- S t a t e -------------
284  // -- r3: number of arguments (untagged)
285  // -- r6: new target
286  // -- r7: pointer to last argument
287  // -- cr0: condition indicating whether r3 is zero
288  // -- sp[0*kPointerSize]: implicit receiver
289  // -- sp[1*kPointerSize]: implicit receiver
290  // -- sp[2*kPointerSize]: padding
291  // -- r4 and sp[3*kPointerSize]: constructor function
292  // -- sp[4*kPointerSize]: number of arguments (tagged)
293  // -- sp[5*kPointerSize]: context
294  // -----------------------------------
295  __ cmpi(r3, Operand::Zero());
296  __ beq(&no_args);
297  __ ShiftLeftImm(ip, r3, Operand(kPointerSizeLog2));
298  __ sub(sp, sp, ip);
299  __ mtctr(r3);
300  __ bind(&loop);
301  __ subi(ip, ip, Operand(kPointerSize));
302  __ LoadPX(r0, MemOperand(r7, ip));
303  __ StorePX(r0, MemOperand(sp, ip));
304  __ bdnz(&loop);
305  __ bind(&no_args);
306 
307  // Call the function.
308  {
309  ConstantPoolUnavailableScope constant_pool_unavailable(masm);
310  ParameterCount actual(r3);
311  __ InvokeFunction(r4, r6, actual, CALL_FUNCTION);
312  }
313 
314  // ----------- S t a t e -------------
315  // -- r0: constructor result
316  // -- sp[0*kPointerSize]: implicit receiver
317  // -- sp[1*kPointerSize]: padding
318  // -- sp[2*kPointerSize]: constructor function
319  // -- sp[3*kPointerSize]: number of arguments
320  // -- sp[4*kPointerSize]: context
321  // -----------------------------------
322 
323  // Store offset of return address for deoptimizer.
324  masm->isolate()->heap()->SetConstructStubInvokeDeoptPCOffset(
325  masm->pc_offset());
326 
327  // Restore the context from the frame.
328  __ LoadP(cp, MemOperand(fp, ConstructFrameConstants::kContextOffset));
329 
330  // If the result is an object (in the ECMA sense), we should get rid
331  // of the receiver and use the result; see ECMA-262 section 13.2.2-7
332  // on page 74.
333  Label use_receiver, do_throw, leave_frame;
334 
335  // If the result is undefined, we jump out to using the implicit receiver.
336  __ JumpIfRoot(r3, RootIndex::kUndefinedValue, &use_receiver);
337 
338  // Otherwise we do a smi check and fall through to check if the return value
339  // is a valid receiver.
340 
341  // If the result is a smi, it is *not* an object in the ECMA sense.
342  __ JumpIfSmi(r3, &use_receiver);
343 
344  // If the type of the result (stored in its map) is less than
345  // FIRST_JS_RECEIVER_TYPE, it is not an object in the ECMA sense.
346  STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
347  __ CompareObjectType(r3, r7, r7, FIRST_JS_RECEIVER_TYPE);
348  __ bge(&leave_frame);
349  __ b(&use_receiver);
350 
351  __ bind(&do_throw);
352  __ CallRuntime(Runtime::kThrowConstructorReturnedNonObject);
353 
354  // Throw away the result of the constructor invocation and use the
355  // on-stack receiver as the result.
356  __ bind(&use_receiver);
357  __ LoadP(r3, MemOperand(sp));
358  __ JumpIfRoot(r3, RootIndex::kTheHoleValue, &do_throw);
359 
360  __ bind(&leave_frame);
361  // Restore smi-tagged arguments count from the frame.
362  __ LoadP(r4, MemOperand(fp, ConstructFrameConstants::kLengthOffset));
363  // Leave construct frame.
364  }
365 
366  // Remove caller arguments from the stack and return.
367  STATIC_ASSERT(kSmiTagSize == 1 && kSmiTag == 0);
368 
369  __ SmiToPtrArrayOffset(r4, r4);
370  __ add(sp, sp, r4);
371  __ addi(sp, sp, Operand(kPointerSize));
372  __ blr();
373 }
374 
375 void Builtins::Generate_JSBuiltinsConstructStub(MacroAssembler* masm) {
376  Generate_JSBuiltinsConstructStubHelper(masm);
377 }
378 
379 static void GetSharedFunctionInfoBytecode(MacroAssembler* masm,
380  Register sfi_data,
381  Register scratch1) {
382  Label done;
383 
384  __ CompareObjectType(sfi_data, scratch1, scratch1, INTERPRETER_DATA_TYPE);
385  __ bne(&done);
386  __ LoadP(sfi_data,
387  FieldMemOperand(sfi_data, InterpreterData::kBytecodeArrayOffset));
388  __ bind(&done);
389 }
390 
391 // static
392 void Builtins::Generate_ResumeGeneratorTrampoline(MacroAssembler* masm) {
393  // ----------- S t a t e -------------
394  // -- r3 : the value to pass to the generator
395  // -- r4 : the JSGeneratorObject to resume
396  // -- lr : return address
397  // -----------------------------------
398  __ AssertGeneratorObject(r4);
399 
400  // Store input value into generator object.
401  __ StoreP(r3, FieldMemOperand(r4, JSGeneratorObject::kInputOrDebugPosOffset),
402  r0);
403  __ RecordWriteField(r4, JSGeneratorObject::kInputOrDebugPosOffset, r3, r6,
404  kLRHasNotBeenSaved, kDontSaveFPRegs);
405 
406  // Load suspended function and context.
407  __ LoadP(r7, FieldMemOperand(r4, JSGeneratorObject::kFunctionOffset));
408  __ LoadP(cp, FieldMemOperand(r7, JSFunction::kContextOffset));
409 
410  // Flood function if we are stepping.
411  Label prepare_step_in_if_stepping, prepare_step_in_suspended_generator;
412  Label stepping_prepared;
413  ExternalReference debug_hook =
414  ExternalReference::debug_hook_on_function_call_address(masm->isolate());
415  __ Move(ip, debug_hook);
416  __ LoadByte(ip, MemOperand(ip), r0);
417  __ extsb(ip, ip);
418  __ CmpSmiLiteral(ip, Smi::zero(), r0);
419  __ bne(&prepare_step_in_if_stepping);
420 
421  // Flood function if we need to continue stepping in the suspended generator.
422 
423  ExternalReference debug_suspended_generator =
424  ExternalReference::debug_suspended_generator_address(masm->isolate());
425 
426  __ Move(ip, debug_suspended_generator);
427  __ LoadP(ip, MemOperand(ip));
428  __ cmp(ip, r4);
429  __ beq(&prepare_step_in_suspended_generator);
430  __ bind(&stepping_prepared);
431 
432  // Check the stack for overflow. We are not trying to catch interruptions
433  // (i.e. debug break and preemption) here, so check the "real stack limit".
434  Label stack_overflow;
435  __ CompareRoot(sp, RootIndex::kRealStackLimit);
436  __ blt(&stack_overflow);
437 
438  // Push receiver.
439  __ LoadP(ip, FieldMemOperand(r4, JSGeneratorObject::kReceiverOffset));
440  __ Push(ip);
441 
442  // ----------- S t a t e -------------
443  // -- r4 : the JSGeneratorObject to resume
444  // -- r7 : generator function
445  // -- cp : generator context
446  // -- lr : return address
447  // -- sp[0] : generator receiver
448  // -----------------------------------
449 
450  // Copy the function arguments from the generator object's register file.
451  __ LoadP(r6, FieldMemOperand(r7, JSFunction::kSharedFunctionInfoOffset));
452  __ LoadHalfWord(
453  r6, FieldMemOperand(r6, SharedFunctionInfo::kFormalParameterCountOffset));
454  __ LoadP(r5, FieldMemOperand(
455  r4, JSGeneratorObject::kParametersAndRegistersOffset));
456  {
457  Label loop, done_loop;
458  __ cmpi(r6, Operand::Zero());
459  __ ble(&done_loop);
460 
461  // Set up r9 to the first element address - kPointerSize.
462  __ addi(r9, r5,
463  Operand(FixedArray::kHeaderSize - kHeapObjectTag - kPointerSize));
464 
465  __ mtctr(r6);
466  __ bind(&loop);
467  __ LoadPU(ip, MemOperand(r9, kPointerSize));
468  __ push(ip);
469  __ bdnz(&loop);
470 
471  __ bind(&done_loop);
472  }
473 
474  // Underlying function needs to have bytecode available.
475  if (FLAG_debug_code) {
476  __ LoadP(r6, FieldMemOperand(r7, JSFunction::kSharedFunctionInfoOffset));
477  __ LoadP(r6, FieldMemOperand(r6, SharedFunctionInfo::kFunctionDataOffset));
478  GetSharedFunctionInfoBytecode(masm, r6, r3);
479  __ CompareObjectType(r6, r6, r6, BYTECODE_ARRAY_TYPE);
480  __ Assert(eq, AbortReason::kMissingBytecodeArray);
481  }
482 
483  // Resume (Ignition/TurboFan) generator object.
484  {
485  // We abuse new.target both to indicate that this is a resume call and to
486  // pass in the generator object. In ordinary calls, new.target is always
487  // undefined because generator functions are non-constructable.
488  __ mr(r6, r4);
489  __ mr(r4, r7);
490  static_assert(kJavaScriptCallCodeStartRegister == r5, "ABI mismatch");
491  __ LoadP(r5, FieldMemOperand(r4, JSFunction::kCodeOffset));
492  __ addi(r5, r5, Operand(Code::kHeaderSize - kHeapObjectTag));
493  __ JumpToJSEntry(r5);
494  }
495 
496  __ bind(&prepare_step_in_if_stepping);
497  {
498  FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
499  __ Push(r4, r7);
500  // Push hole as receiver since we do not use it for stepping.
501  __ PushRoot(RootIndex::kTheHoleValue);
502  __ CallRuntime(Runtime::kDebugOnFunctionCall);
503  __ Pop(r4);
504  __ LoadP(r7, FieldMemOperand(r4, JSGeneratorObject::kFunctionOffset));
505  }
506  __ b(&stepping_prepared);
507 
508  __ bind(&prepare_step_in_suspended_generator);
509  {
510  FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
511  __ Push(r4);
512  __ CallRuntime(Runtime::kDebugPrepareStepInSuspendedGenerator);
513  __ Pop(r4);
514  __ LoadP(r7, FieldMemOperand(r4, JSGeneratorObject::kFunctionOffset));
515  }
516  __ b(&stepping_prepared);
517 
518  __ bind(&stack_overflow);
519  {
520  FrameScope scope(masm, StackFrame::INTERNAL);
521  __ CallRuntime(Runtime::kThrowStackOverflow);
522  __ bkpt(0); // This should be unreachable.
523  }
524 }
525 
526 void Builtins::Generate_ConstructedNonConstructable(MacroAssembler* masm) {
527  FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
528  __ push(r4);
529  __ CallRuntime(Runtime::kThrowConstructedNonConstructable);
530 }
531 
532 // Clobbers r5; preserves all other registers.
533 static void Generate_CheckStackOverflow(MacroAssembler* masm, Register argc) {
534  // Check the stack for overflow. We are not trying to catch
535  // interruptions (e.g. debug break and preemption) here, so the "real stack
536  // limit" is checked.
537  Label okay;
538  __ LoadRoot(r5, RootIndex::kRealStackLimit);
539  // Make r5 the space we have left. The stack might already be overflowed
540  // here which will cause r5 to become negative.
541  __ sub(r5, sp, r5);
542  // Check if the arguments will overflow the stack.
543  __ ShiftLeftImm(r0, argc, Operand(kPointerSizeLog2));
544  __ cmp(r5, r0);
545  __ bgt(&okay); // Signed comparison.
546 
547  // Out of stack space.
548  __ CallRuntime(Runtime::kThrowStackOverflow);
549 
550  __ bind(&okay);
551 }
552 
553 static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm,
554  bool is_construct) {
555  // Called from Generate_JS_Entry
556  // r3: new.target
557  // r4: function
558  // r5: receiver
559  // r6: argc
560  // r7: argv
561  // r0, r8-r9, cp may be clobbered
562 
563  // Enter an internal frame.
564  {
565  FrameScope scope(masm, StackFrame::INTERNAL);
566 
567  // Set up the context (we need to use the caller context from the isolate).
568  ExternalReference context_address = ExternalReference::Create(
569  IsolateAddressId::kContextAddress, masm->isolate());
570  __ Move(cp, context_address);
571  __ LoadP(cp, MemOperand(cp));
572 
573  // Push the function and the receiver onto the stack.
574  __ Push(r4, r5);
575 
576  // Check if we have enough stack space to push all arguments.
577  // Clobbers r5.
578  Generate_CheckStackOverflow(masm, r6);
579 
580  // Copy arguments to the stack in a loop.
581  // r4: function
582  // r6: argc
583  // r7: argv, i.e. points to first arg
584  Label loop, entry;
585  __ ShiftLeftImm(r0, r6, Operand(kPointerSizeLog2));
586  __ add(r5, r7, r0);
587  // r5 points past last arg.
588  __ b(&entry);
589  __ bind(&loop);
590  __ LoadP(r8, MemOperand(r7)); // read next parameter
591  __ addi(r7, r7, Operand(kPointerSize));
592  __ LoadP(r0, MemOperand(r8)); // dereference handle
593  __ push(r0); // push parameter
594  __ bind(&entry);
595  __ cmp(r7, r5);
596  __ bne(&loop);
597 
598  // Set up new.target and argc.
599  __ mr(r7, r3);
600  __ mr(r3, r6);
601  __ mr(r6, r7);
602 
603  // Initialize all JavaScript callee-saved registers, since they will be seen
604  // by the garbage collector as part of handlers.
605  __ LoadRoot(r7, RootIndex::kUndefinedValue);
606  __ mr(r14, r7);
607  __ mr(r15, r7);
608  __ mr(r16, r7);
609  __ mr(r17, r7);
610 
611  // Invoke the code.
612  Handle<Code> builtin = is_construct
613  ? BUILTIN_CODE(masm->isolate(), Construct)
614  : masm->isolate()->builtins()->Call();
615  __ Call(builtin, RelocInfo::CODE_TARGET);
616 
617  // Exit the JS frame and remove the parameters (except function), and
618  // return.
619  }
620  __ blr();
621 
622  // r3: result
623 }
624 
625 void Builtins::Generate_JSEntryTrampoline(MacroAssembler* masm) {
626  Generate_JSEntryTrampolineHelper(masm, false);
627 }
628 
629 void Builtins::Generate_JSConstructEntryTrampoline(MacroAssembler* masm) {
630  Generate_JSEntryTrampolineHelper(masm, true);
631 }
632 
633 static void ReplaceClosureCodeWithOptimizedCode(
634  MacroAssembler* masm, Register optimized_code, Register closure,
635  Register scratch1, Register scratch2, Register scratch3) {
636  // Store code entry in the closure.
637  __ StoreP(optimized_code, FieldMemOperand(closure, JSFunction::kCodeOffset),
638  r0);
639  __ mr(scratch1, optimized_code); // Write barrier clobbers scratch1 below.
640  __ RecordWriteField(closure, JSFunction::kCodeOffset, scratch1, scratch2,
641  kLRHasNotBeenSaved, kDontSaveFPRegs, OMIT_REMEMBERED_SET,
642  OMIT_SMI_CHECK);
643 }
644 
645 static void LeaveInterpreterFrame(MacroAssembler* masm, Register scratch) {
646  Register args_count = scratch;
647 
648  // Get the arguments + receiver count.
649  __ LoadP(args_count,
650  MemOperand(fp, InterpreterFrameConstants::kBytecodeArrayFromFp));
651  __ lwz(args_count,
652  FieldMemOperand(args_count, BytecodeArray::kParameterSizeOffset));
653 
654  // Leave the frame (also dropping the register file).
655  __ LeaveFrame(StackFrame::INTERPRETED);
656 
657  __ add(sp, sp, args_count);
658 }
659 
660 // Tail-call |function_id| if |smi_entry| == |marker|
661 static void TailCallRuntimeIfMarkerEquals(MacroAssembler* masm,
662  Register smi_entry,
663  OptimizationMarker marker,
664  Runtime::FunctionId function_id) {
665  Label no_match;
666  __ CmpSmiLiteral(smi_entry, Smi::FromEnum(marker), r0);
667  __ bne(&no_match);
668  GenerateTailCallToReturnedCode(masm, function_id);
669  __ bind(&no_match);
670 }
671 
672 static void MaybeTailCallOptimizedCodeSlot(MacroAssembler* masm,
673  Register feedback_vector,
674  Register scratch1, Register scratch2,
675  Register scratch3) {
676  // ----------- S t a t e -------------
677  // -- r3 : argument count (preserved for callee if needed, and caller)
678  // -- r6 : new target (preserved for callee if needed, and caller)
679  // -- r4 : target function (preserved for callee if needed, and caller)
680  // -- feedback vector (preserved for caller if needed)
681  // -----------------------------------
682  DCHECK(
683  !AreAliased(feedback_vector, r3, r4, r6, scratch1, scratch2, scratch3));
684 
685  Label optimized_code_slot_is_weak_ref, fallthrough;
686 
687  Register closure = r4;
688  Register optimized_code_entry = scratch1;
689 
690  __ LoadP(
691  optimized_code_entry,
692  FieldMemOperand(feedback_vector, FeedbackVector::kOptimizedCodeOffset));
693 
694  // Check if the code entry is a Smi. If yes, we interpret it as an
695  // optimization marker. Otherwise, interpret it as a weak reference to a code
696  // object.
697  __ JumpIfNotSmi(optimized_code_entry, &optimized_code_slot_is_weak_ref);
698 
699  {
700  // Optimized code slot is a Smi optimization marker.
701 
702  // Fall through if no optimization trigger.
703  __ CmpSmiLiteral(optimized_code_entry,
704  Smi::FromEnum(OptimizationMarker::kNone), r0);
705  __ beq(&fallthrough);
706 
707  TailCallRuntimeIfMarkerEquals(masm, optimized_code_entry,
708  OptimizationMarker::kLogFirstExecution,
709  Runtime::kFunctionFirstExecution);
710  TailCallRuntimeIfMarkerEquals(masm, optimized_code_entry,
711  OptimizationMarker::kCompileOptimized,
712  Runtime::kCompileOptimized_NotConcurrent);
713  TailCallRuntimeIfMarkerEquals(
714  masm, optimized_code_entry,
715  OptimizationMarker::kCompileOptimizedConcurrent,
716  Runtime::kCompileOptimized_Concurrent);
717 
718  {
719  // Otherwise, the marker is InOptimizationQueue, so fall through hoping
720  // that an interrupt will eventually update the slot with optimized code.
721  if (FLAG_debug_code) {
722  __ CmpSmiLiteral(
723  optimized_code_entry,
724  Smi::FromEnum(OptimizationMarker::kInOptimizationQueue), r0);
725  __ Assert(eq, AbortReason::kExpectedOptimizationSentinel);
726  }
727  __ b(&fallthrough);
728  }
729  }
730 
731  {
732  // Optimized code slot is a weak reference.
733  __ bind(&optimized_code_slot_is_weak_ref);
734 
735  __ LoadWeakValue(optimized_code_entry, optimized_code_entry, &fallthrough);
736 
737  // Check if the optimized code is marked for deopt. If it is, call the
738  // runtime to clear it.
739  Label found_deoptimized_code;
740  __ LoadP(scratch2, FieldMemOperand(optimized_code_entry,
741  Code::kCodeDataContainerOffset));
742  __ LoadWordArith(
743  scratch2,
744  FieldMemOperand(scratch2, CodeDataContainer::kKindSpecificFlagsOffset));
745  __ TestBit(scratch2, Code::kMarkedForDeoptimizationBit, r0);
746  __ bne(&found_deoptimized_code, cr0);
747 
748  // Optimized code is good, get it into the closure and link the closure into
749  // the optimized functions list, then tail call the optimized code.
750  // The feedback vector is no longer used, so re-use it as a scratch
751  // register.
752  ReplaceClosureCodeWithOptimizedCode(masm, optimized_code_entry, closure,
753  scratch2, scratch3, feedback_vector);
754  static_assert(kJavaScriptCallCodeStartRegister == r5, "ABI mismatch");
755  __ addi(r5, optimized_code_entry,
756  Operand(Code::kHeaderSize - kHeapObjectTag));
757  __ Jump(r5);
758 
759  // Optimized code slot contains deoptimized code, evict it and re-enter the
760  // closure's code.
761  __ bind(&found_deoptimized_code);
762  GenerateTailCallToReturnedCode(masm, Runtime::kEvictOptimizedCodeSlot);
763  }
764 
765  // Fall-through if the optimized code cell is clear and there is no
766  // optimization marker.
767  __ bind(&fallthrough);
768 }
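// Decision flow above (a sketch): the feedback vector's optimized-code slot is
// either a Smi optimization marker or a weak reference to a Code object.
//   marker kNone                                    -> fall through
//   marker kInOptimizationQueue                     -> fall through
//   kLogFirstExecution / kCompileOptimized /
//   kCompileOptimizedConcurrent                     -> tail-call the matching runtime entry
//   weak reference, cleared                         -> fall through
//   weak reference, code marked for deoptimization  -> Runtime::kEvictOptimizedCodeSlot
//   weak reference, valid code                      -> install in the closure and tail-call it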
769 
770 // Advance the current bytecode offset. This simulates what all bytecode
771 // handlers do upon completion of the underlying operation. Will bail out to a
772 // label if the bytecode (without prefix) is a return bytecode.
773 static void AdvanceBytecodeOffsetOrReturn(MacroAssembler* masm,
774  Register bytecode_array,
775  Register bytecode_offset,
776  Register bytecode, Register scratch1,
777  Label* if_return) {
778  Register bytecode_size_table = scratch1;
779  Register scratch2 = bytecode;
780  DCHECK(!AreAliased(bytecode_array, bytecode_offset, bytecode_size_table,
781  bytecode));
782  __ Move(bytecode_size_table,
783  ExternalReference::bytecode_size_table_address());
784 
785  // Check if the bytecode is a Wide or ExtraWide prefix bytecode.
786  Label process_bytecode, extra_wide;
787  STATIC_ASSERT(0 == static_cast<int>(interpreter::Bytecode::kWide));
788  STATIC_ASSERT(1 == static_cast<int>(interpreter::Bytecode::kExtraWide));
789  STATIC_ASSERT(2 == static_cast<int>(interpreter::Bytecode::kDebugBreakWide));
790  STATIC_ASSERT(3 ==
791  static_cast<int>(interpreter::Bytecode::kDebugBreakExtraWide));
792  __ cmpi(bytecode, Operand(0x3));
793  __ bgt(&process_bytecode);
794  __ andi(r0, bytecode, Operand(0x1));
795  __ bne(&extra_wide, cr0);
796 
797  // Load the next bytecode and update table to the wide scaled table.
798  __ addi(bytecode_offset, bytecode_offset, Operand(1));
799  __ lbzx(bytecode, MemOperand(bytecode_array, bytecode_offset));
800  __ addi(bytecode_size_table, bytecode_size_table,
801  Operand(kIntSize * interpreter::Bytecodes::kBytecodeCount));
802  __ b(&process_bytecode);
803 
804  __ bind(&extra_wide);
805  // Load the next bytecode and update table to the extra wide scaled table.
806  __ addi(bytecode_offset, bytecode_offset, Operand(1));
807  __ lbzx(bytecode, MemOperand(bytecode_array, bytecode_offset));
808  __ addi(bytecode_size_table, bytecode_size_table,
809  Operand(2 * kIntSize * interpreter::Bytecodes::kBytecodeCount));
810 
811  // Load the size of the current bytecode.
812  __ bind(&process_bytecode);
813 
814 // Bail out to the return label if this is a return bytecode.
815 #define JUMP_IF_EQUAL(NAME) \
816  __ cmpi(bytecode, \
817  Operand(static_cast<int>(interpreter::Bytecode::k##NAME))); \
818  __ beq(if_return);
819  RETURN_BYTECODE_LIST(JUMP_IF_EQUAL)
820 #undef JUMP_IF_EQUAL
821 
822  // Otherwise, load the size of the current bytecode and advance the offset.
823  __ ShiftLeftImm(scratch2, bytecode, Operand(2));
824  __ lwzx(scratch2, MemOperand(bytecode_size_table, scratch2));
825  __ add(bytecode_offset, bytecode_offset, scratch2);
826 }
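// Sketch of the logic above (not literal code):
//   if (bytecode <= kDebugBreakExtraWide) {          // a Wide/ExtraWide prefix
//     bytecode = bytecode_array[++offset];
//     size_table += (extra_wide ? 2 : 1) * kIntSize * kBytecodeCount;
//   }
//   if (IsReturnBytecode(bytecode)) goto if_return;
//   offset += size_table[bytecode];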
827 // Generate code for entering a JS function with the interpreter.
828 // On entry to the function the receiver and arguments have been pushed on the
829 // stack left to right. The actual argument count matches the formal parameter
830 // count expected by the function.
831 //
832 // The live registers are:
833 // o r4: the JS function object being called.
834 // o r6: the incoming new target or generator object
835 // o cp: our context
836 // o pp: the caller's constant pool pointer (if enabled)
837 // o fp: the caller's frame pointer
838 // o sp: stack pointer
839 // o lr: return address
840 //
841 // The function builds an interpreter frame. See InterpreterFrameConstants in
842 // frames.h for its layout.
843 void Builtins::Generate_InterpreterEntryTrampoline(MacroAssembler* masm) {
844  Register closure = r4;
845  Register feedback_vector = r5;
846 
847  // Load the feedback vector from the closure.
848  __ LoadP(feedback_vector,
849  FieldMemOperand(closure, JSFunction::kFeedbackCellOffset));
850  __ LoadP(feedback_vector,
851  FieldMemOperand(feedback_vector, Cell::kValueOffset));
852  // Read off the optimized code slot in the feedback vector, and if there
853  // is optimized code or an optimization marker, call that instead.
854  MaybeTailCallOptimizedCodeSlot(masm, feedback_vector, r7, r9, r8);
855 
856  // Open a frame scope to indicate that there is a frame on the stack. The
857  // MANUAL indicates that the scope shouldn't actually generate code to set up
858  // the frame (that is done below).
859  FrameScope frame_scope(masm, StackFrame::MANUAL);
860  __ PushStandardFrame(closure);
861 
862  // Get the bytecode array from the function object and load it into
863  // kInterpreterBytecodeArrayRegister.
864  __ LoadP(r3, FieldMemOperand(closure, JSFunction::kSharedFunctionInfoOffset));
865  // Load original bytecode array or the debug copy.
866  __ LoadP(kInterpreterBytecodeArrayRegister,
867  FieldMemOperand(r3, SharedFunctionInfo::kFunctionDataOffset));
868  GetSharedFunctionInfoBytecode(masm, kInterpreterBytecodeArrayRegister, r7);
869 
870  // Increment invocation count for the function.
871  __ LoadWord(
872  r8,
873  FieldMemOperand(feedback_vector, FeedbackVector::kInvocationCountOffset),
874  r0);
875  __ addi(r8, r8, Operand(1));
876  __ StoreWord(
877  r8,
878  FieldMemOperand(feedback_vector, FeedbackVector::kInvocationCountOffset),
879  r0);
880 
881  // Check that the function data field is actually a BytecodeArray object.
882 
883  if (FLAG_debug_code) {
884  __ TestIfSmi(kInterpreterBytecodeArrayRegister, r0);
885  __ Assert(ne,
886  AbortReason::kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry,
887  cr0);
888  __ CompareObjectType(kInterpreterBytecodeArrayRegister, r3, no_reg,
889  BYTECODE_ARRAY_TYPE);
890  __ Assert(
891  eq, AbortReason::kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry);
892  }
893 
894  // Reset code age.
895  __ mov(r8, Operand(BytecodeArray::kNoAgeBytecodeAge));
896  __ StoreByte(r8, FieldMemOperand(kInterpreterBytecodeArrayRegister,
897  BytecodeArray::kBytecodeAgeOffset),
898  r0);
899 
900  // Load initial bytecode offset.
901  __ mov(kInterpreterBytecodeOffsetRegister,
902  Operand(BytecodeArray::kHeaderSize - kHeapObjectTag));
903 
904  // Push bytecode array and Smi tagged bytecode array offset.
905  __ SmiTag(r3, kInterpreterBytecodeOffsetRegister);
906  __ Push(kInterpreterBytecodeArrayRegister, r3);
907 
908  // Allocate the local and temporary register file on the stack.
909  {
910  // Load frame size (word) from the BytecodeArray object.
911  __ lwz(r5, FieldMemOperand(kInterpreterBytecodeArrayRegister,
912  BytecodeArray::kFrameSizeOffset));
913 
914  // Do a stack check to ensure we don't go over the limit.
915  Label ok;
916  __ sub(r8, sp, r5);
917  __ LoadRoot(r0, RootIndex::kRealStackLimit);
918  __ cmpl(r8, r0);
919  __ bge(&ok);
920  __ CallRuntime(Runtime::kThrowStackOverflow);
921  __ bind(&ok);
922 
923  // If ok, push undefined as the initial value for all register file entries.
924  // TODO(rmcilroy): Consider doing more than one push per loop iteration.
925  Label loop, no_args;
926  __ LoadRoot(r8, RootIndex::kUndefinedValue);
927  __ ShiftRightImm(r5, r5, Operand(kPointerSizeLog2), SetRC);
928  __ beq(&no_args, cr0);
929  __ mtctr(r5);
930  __ bind(&loop);
931  __ push(r8);
932  __ bdnz(&loop);
933  __ bind(&no_args);
934  }
935 
936  // If the bytecode array has a valid incoming new target or generator object
937  // register, initialize it with the incoming value, which was passed in r6.
938  Label no_incoming_new_target_or_generator_register;
939  __ LoadWordArith(
940  r8, FieldMemOperand(
941  kInterpreterBytecodeArrayRegister,
942  BytecodeArray::kIncomingNewTargetOrGeneratorRegisterOffset));
943  __ cmpi(r8, Operand::Zero());
944  __ beq(&no_incoming_new_target_or_generator_register);
945  __ ShiftLeftImm(r8, r8, Operand(kPointerSizeLog2));
946  __ StorePX(r6, MemOperand(fp, r8));
947  __ bind(&no_incoming_new_target_or_generator_register);
948 
949  // Load accumulator with undefined.
950  __ LoadRoot(kInterpreterAccumulatorRegister, RootIndex::kUndefinedValue);
951  // Load the dispatch table into a register and dispatch to the bytecode
952  // handler at the current bytecode offset.
953  Label do_dispatch;
954  __ bind(&do_dispatch);
955  __ Move(
956  kInterpreterDispatchTableRegister,
957  ExternalReference::interpreter_dispatch_table_address(masm->isolate()));
958  __ lbzx(r6, MemOperand(kInterpreterBytecodeArrayRegister,
959  kInterpreterBytecodeOffsetRegister));
960  __ ShiftLeftImm(r6, r6, Operand(kPointerSizeLog2));
961  __ LoadPX(kJavaScriptCallCodeStartRegister,
962  MemOperand(kInterpreterDispatchTableRegister, r6));
963  __ Call(kJavaScriptCallCodeStartRegister);
964 
965  masm->isolate()->heap()->SetInterpreterEntryReturnPCOffset(masm->pc_offset());
966 
967  // Any returns to the entry trampoline are either due to the return bytecode
968  // or the interpreter tail calling a builtin and then a dispatch.
969 
970  // Get bytecode array and bytecode offset from the stack frame.
971  __ LoadP(kInterpreterBytecodeArrayRegister,
972  MemOperand(fp, InterpreterFrameConstants::kBytecodeArrayFromFp));
973  __ LoadP(kInterpreterBytecodeOffsetRegister,
974  MemOperand(fp, InterpreterFrameConstants::kBytecodeOffsetFromFp));
975  __ SmiUntag(kInterpreterBytecodeOffsetRegister);
976 
977  // Either return, or advance to the next bytecode and dispatch.
978  Label do_return;
979  __ lbzx(r4, MemOperand(kInterpreterBytecodeArrayRegister,
980  kInterpreterBytecodeOffsetRegister));
981  AdvanceBytecodeOffsetOrReturn(masm, kInterpreterBytecodeArrayRegister,
982  kInterpreterBytecodeOffsetRegister, r4, r5,
983  &do_return);
984  __ b(&do_dispatch);
985 
986  __ bind(&do_return);
987  // The return value is in r3.
988  LeaveInterpreterFrame(masm, r5);
989  __ blr();
990 }
991 
992 static void Generate_InterpreterPushArgs(MacroAssembler* masm,
993  Register num_args, Register index,
994  Register count, Register scratch) {
995  Label loop, skip;
996  __ cmpi(count, Operand::Zero());
997  __ beq(&skip);
998  __ addi(index, index, Operand(kPointerSize)); // Bias up for LoadPU
999  __ mtctr(count);
1000  __ bind(&loop);
1001  __ LoadPU(scratch, MemOperand(index, -kPointerSize));
1002  __ push(scratch);
1003  __ bdnz(&loop);
1004  __ bind(&skip);
1005 }
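// In effect (a sketch): pushes |count| values starting at |index| and walking
// down in memory, i.e. for (i = 0; i < count; ++i) Push(*(index - i * kPointerSize));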
1006 
1007 // static
1008 void Builtins::Generate_InterpreterPushArgsThenCallImpl(
1009  MacroAssembler* masm, ConvertReceiverMode receiver_mode,
1010  InterpreterPushArgsMode mode) {
1011  DCHECK(mode != InterpreterPushArgsMode::kArrayFunction);
1012  // ----------- S t a t e -------------
1013  // -- r3 : the number of arguments (not including the receiver)
1014  // -- r5 : the address of the first argument to be pushed. Subsequent
1015  // arguments should be consecutive above this, in the same order as
1016  // they are to be pushed onto the stack.
1017  // -- r4 : the target to call (can be any Object).
1018  // -----------------------------------
1019  Label stack_overflow;
1020 
1021  // Calculate number of arguments (add one for receiver).
1022  __ addi(r6, r3, Operand(1));
1023 
1024  Generate_StackOverflowCheck(masm, r6, ip, &stack_overflow);
1025 
1026  // Push "undefined" as the receiver arg if we need to.
1027  if (receiver_mode == ConvertReceiverMode::kNullOrUndefined) {
1028  __ PushRoot(RootIndex::kUndefinedValue);
1029  __ mr(r6, r3); // Argument count is correct.
1030  }
1031 
1032  // Push the arguments. r5, r6, r7 will be modified.
1033  Generate_InterpreterPushArgs(masm, r6, r5, r6, r7);
1034 
1035  if (mode == InterpreterPushArgsMode::kWithFinalSpread) {
1036  __ Pop(r5); // Pass the spread in a register
1037  __ subi(r3, r3, Operand(1)); // Subtract one for spread
1038  }
1039 
1040  // Call the target.
1041  if (mode == InterpreterPushArgsMode::kWithFinalSpread) {
1042  __ Jump(BUILTIN_CODE(masm->isolate(), CallWithSpread),
1043  RelocInfo::CODE_TARGET);
1044  } else {
1045  __ Jump(masm->isolate()->builtins()->Call(ConvertReceiverMode::kAny),
1046  RelocInfo::CODE_TARGET);
1047  }
1048 
1049  __ bind(&stack_overflow);
1050  {
1051  __ TailCallRuntime(Runtime::kThrowStackOverflow);
1052  // Unreachable code.
1053  __ bkpt(0);
1054  }
1055 }
1056 
1057 // static
1058 void Builtins::Generate_InterpreterPushArgsThenConstructImpl(
1059  MacroAssembler* masm, InterpreterPushArgsMode mode) {
1060  // ----------- S t a t e -------------
1061  // -- r3 : argument count (not including receiver)
1062  // -- r6 : new target
1063  // -- r4 : constructor to call
1064  // -- r5 : allocation site feedback if available, undefined otherwise.
1065  // -- r7 : address of the first argument
1066  // -----------------------------------
1067  Label stack_overflow;
1068 
1069  // Push a slot for the receiver to be constructed.
1070  __ li(r0, Operand::Zero());
1071  __ push(r0);
1072 
1073  // Push the arguments (skip if none).
1074  Label skip;
1075  __ cmpi(r3, Operand::Zero());
1076  __ beq(&skip);
1077  Generate_StackOverflowCheck(masm, r3, ip, &stack_overflow);
1078  // Push the arguments. r7 and r9 will be modified.
1079  Generate_InterpreterPushArgs(masm, r3, r7, r3, r9);
1080  __ bind(&skip);
1081  if (mode == InterpreterPushArgsMode::kWithFinalSpread) {
1082  __ Pop(r5); // Pass the spread in a register
1083  __ subi(r3, r3, Operand(1)); // Subtract one for spread
1084  } else {
1085  __ AssertUndefinedOrAllocationSite(r5, r8);
1086  }
1087  if (mode == InterpreterPushArgsMode::kArrayFunction) {
1088  __ AssertFunction(r4);
1089 
1090  // Tail call to the array construct stub (still in the caller
1091  // context at this point).
1092  Handle<Code> code = BUILTIN_CODE(masm->isolate(), ArrayConstructorImpl);
1093  __ Jump(code, RelocInfo::CODE_TARGET);
1094  } else if (mode == InterpreterPushArgsMode::kWithFinalSpread) {
1095  // Call the constructor with r3, r4, and r6 unmodified.
1096  __ Jump(BUILTIN_CODE(masm->isolate(), ConstructWithSpread),
1097  RelocInfo::CODE_TARGET);
1098  } else {
1099  DCHECK_EQ(InterpreterPushArgsMode::kOther, mode);
1100  // Call the constructor with r3, r4, and r6 unmodified.
1101  __ Jump(BUILTIN_CODE(masm->isolate(), Construct), RelocInfo::CODE_TARGET);
1102  }
1103 
1104  __ bind(&stack_overflow);
1105  {
1106  __ TailCallRuntime(Runtime::kThrowStackOverflow);
1107  // Unreachable code.
1108  __ bkpt(0);
1109  }
1110 }
1111 
1112 static void Generate_InterpreterEnterBytecode(MacroAssembler* masm) {
1113  // Set the return address to the correct point in the interpreter entry
1114  // trampoline.
1115  Label builtin_trampoline, trampoline_loaded;
1116  Smi interpreter_entry_return_pc_offset(
1117  masm->isolate()->heap()->interpreter_entry_return_pc_offset());
1118  DCHECK_NE(interpreter_entry_return_pc_offset, Smi::zero());
1119 
1120  // If the SFI function_data is an InterpreterData, the function will have a
1121  // custom copy of the interpreter entry trampoline for profiling. If so,
1122  // get the custom trampoline, otherwise grab the entry address of the global
1123  // trampoline.
1124  __ LoadP(r5, MemOperand(fp, StandardFrameConstants::kFunctionOffset));
1125  __ LoadP(r5, FieldMemOperand(r5, JSFunction::kSharedFunctionInfoOffset));
1126  __ LoadP(r5, FieldMemOperand(r5, SharedFunctionInfo::kFunctionDataOffset));
1127  __ CompareObjectType(r5, kInterpreterDispatchTableRegister,
1128  kInterpreterDispatchTableRegister,
1129  INTERPRETER_DATA_TYPE);
1130  __ bne(&builtin_trampoline);
1131 
1132  __ LoadP(r5,
1133  FieldMemOperand(r5, InterpreterData::kInterpreterTrampolineOffset));
1134  __ addi(r5, r5, Operand(Code::kHeaderSize - kHeapObjectTag));
1135  __ b(&trampoline_loaded);
1136 
1137  __ bind(&builtin_trampoline);
1138  __ Move(r5, ExternalReference::
1139  address_of_interpreter_entry_trampoline_instruction_start(
1140  masm->isolate()));
1141  __ LoadP(r5, MemOperand(r5));
1142 
1143  __ bind(&trampoline_loaded);
1144  __ addi(r0, r5, Operand(interpreter_entry_return_pc_offset->value()));
1145  __ mtlr(r0);
1146 
1147  // Initialize the dispatch table register.
1148  __ Move(
1149  kInterpreterDispatchTableRegister,
1150  ExternalReference::interpreter_dispatch_table_address(masm->isolate()));
1151 
1152  // Get the bytecode array pointer from the frame.
1153  __ LoadP(kInterpreterBytecodeArrayRegister,
1154  MemOperand(fp, InterpreterFrameConstants::kBytecodeArrayFromFp));
1155 
1156  if (FLAG_debug_code) {
1157  // Check that the function data field is actually a BytecodeArray object.
1158  __ TestIfSmi(kInterpreterBytecodeArrayRegister, r0);
1159  __ Assert(ne,
1160  AbortReason::kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry,
1161  cr0);
1162  __ CompareObjectType(kInterpreterBytecodeArrayRegister, r4, no_reg,
1163  BYTECODE_ARRAY_TYPE);
1164  __ Assert(
1165  eq, AbortReason::kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry);
1166  }
1167 
1168  // Get the target bytecode offset from the frame.
1169  __ LoadP(kInterpreterBytecodeOffsetRegister,
1170  MemOperand(fp, InterpreterFrameConstants::kBytecodeOffsetFromFp));
1171  __ SmiUntag(kInterpreterBytecodeOffsetRegister);
1172 
1173  // Dispatch to the target bytecode.
1174  __ lbzx(ip, MemOperand(kInterpreterBytecodeArrayRegister,
1175  kInterpreterBytecodeOffsetRegister));
1176  __ ShiftLeftImm(ip, ip, Operand(kPointerSizeLog2));
1177  __ LoadPX(kJavaScriptCallCodeStartRegister,
1178  MemOperand(kInterpreterDispatchTableRegister, ip));
1179  __ Jump(kJavaScriptCallCodeStartRegister);
1180 }
1181 
1182 void Builtins::Generate_InterpreterEnterBytecodeAdvance(MacroAssembler* masm) {
1183  // Get bytecode array and bytecode offset from the stack frame.
1184  __ LoadP(kInterpreterBytecodeArrayRegister,
1185  MemOperand(fp, InterpreterFrameConstants::kBytecodeArrayFromFp));
1186  __ LoadP(kInterpreterBytecodeOffsetRegister,
1187  MemOperand(fp, InterpreterFrameConstants::kBytecodeOffsetFromFp));
1188  __ SmiUntag(kInterpreterBytecodeOffsetRegister);
1189 
1190  // Load the current bytecode.
1191  __ lbzx(r4, MemOperand(kInterpreterBytecodeArrayRegister,
1192  kInterpreterBytecodeOffsetRegister));
1193 
1194  // Advance to the next bytecode.
1195  Label if_return;
1196  AdvanceBytecodeOffsetOrReturn(masm, kInterpreterBytecodeArrayRegister,
1197  kInterpreterBytecodeOffsetRegister, r4, r5,
1198  &if_return);
1199 
1200  // Convert new bytecode offset to a Smi and save in the stackframe.
1201  __ SmiTag(r5, kInterpreterBytecodeOffsetRegister);
1202  __ StoreP(r5,
1203  MemOperand(fp, InterpreterFrameConstants::kBytecodeOffsetFromFp));
1204 
1205  Generate_InterpreterEnterBytecode(masm);
1206 
1207  // We should never take the if_return path.
1208  __ bind(&if_return);
1209  __ Abort(AbortReason::kInvalidBytecodeAdvance);
1210 }
1211 
1212 void Builtins::Generate_InterpreterEnterBytecodeDispatch(MacroAssembler* masm) {
1213  Generate_InterpreterEnterBytecode(masm);
1214 }
1215 
1216 void Builtins::Generate_InstantiateAsmJs(MacroAssembler* masm) {
1217  // ----------- S t a t e -------------
1218  // -- r3 : argument count (preserved for callee)
1219  // -- r4 : new target (preserved for callee)
1220  // -- r6 : target function (preserved for callee)
1221  // -----------------------------------
1222  Label failed;
1223  {
1224  FrameScope scope(masm, StackFrame::INTERNAL);
1225  // Preserve argument count for later compare.
1226  __ Move(r7, r3);
1227  // Push a copy of the target function and the new target.
1228  // Push function as parameter to the runtime call.
1229  __ SmiTag(r3);
1230  __ Push(r3, r4, r6, r4);
1231 
1232  // Copy arguments from caller (stdlib, foreign, heap).
1233  Label args_done;
1234  for (int j = 0; j < 4; ++j) {
1235  Label over;
1236  if (j < 3) {
1237  __ cmpi(r7, Operand(j));
1238  __ bne(&over);
1239  }
1240  for (int i = j - 1; i >= 0; --i) {
1241  __ LoadP(r7, MemOperand(fp, StandardFrameConstants::kCallerSPOffset +
1242  i * kPointerSize));
1243  __ push(r7);
1244  }
1245  for (int i = 0; i < 3 - j; ++i) {
1246  __ PushRoot(RootIndex::kUndefinedValue);
1247  }
1248  if (j < 3) {
1249  __ jmp(&args_done);
1250  __ bind(&over);
1251  }
1252  }
1253  __ bind(&args_done);
1254 
1255  // Call the runtime; on success, unwind this frame and the parent frame.
1256  __ CallRuntime(Runtime::kInstantiateAsmJs, 4);
1257  // A smi 0 is returned on failure, an object on success.
1258  __ JumpIfSmi(r3, &failed);
1259 
1260  __ Drop(2);
1261  __ pop(r7);
1262  __ SmiUntag(r7);
1263  scope.GenerateLeaveFrame();
1264 
1265  __ addi(r7, r7, Operand(1));
1266  __ Drop(r7);
1267  __ Ret();
1268 
1269  __ bind(&failed);
1270  // Restore target function and new target.
1271  __ Pop(r3, r4, r6);
1272  __ SmiUntag(r3);
1273  }
1274  // On failure, tail call back to regular JS by re-calling the function
1275  // which has been reset to the CompileLazy builtin.
1276  static_assert(kJavaScriptCallCodeStartRegister == r5, "ABI mismatch");
1277  __ LoadP(r5, FieldMemOperand(r4, JSFunction::kCodeOffset));
1278  __ addi(r5, r5, Operand(Code::kHeaderSize - kHeapObjectTag));
1279  __ JumpToJSEntry(r5);
1280 }
1281 
1282 namespace {
1283 void Generate_ContinueToBuiltinHelper(MacroAssembler* masm,
1284  bool java_script_builtin,
1285  bool with_result) {
1286  const RegisterConfiguration* config(RegisterConfiguration::Default());
1287  int allocatable_register_count = config->num_allocatable_general_registers();
1288  if (with_result) {
1289  // Overwrite the hole inserted by the deoptimizer with the return value from
1290  // the LAZY deopt point.
1291  __ StoreP(
1292  r3, MemOperand(
1293  sp, config->num_allocatable_general_registers() * kPointerSize +
1294  BuiltinContinuationFrameConstants::kFixedFrameSize));
1295  }
1296  for (int i = allocatable_register_count - 1; i >= 0; --i) {
1297  int code = config->GetAllocatableGeneralCode(i);
1298  __ Pop(Register::from_code(code));
1299  if (java_script_builtin && code == kJavaScriptCallArgCountRegister.code()) {
1300  __ SmiUntag(Register::from_code(code));
1301  }
1302  }
1303  __ LoadP(
1304  fp,
1305  MemOperand(sp, BuiltinContinuationFrameConstants::kFixedFrameSizeFromFp));
1306  __ Pop(ip);
1307  __ addi(sp, sp,
1308  Operand(BuiltinContinuationFrameConstants::kFixedFrameSizeFromFp));
1309  __ Pop(r0);
1310  __ mtlr(r0);
1311  __ addi(ip, ip, Operand(Code::kHeaderSize - kHeapObjectTag));
1312  __ Jump(ip);
1313 }
1314 } // namespace
1315 
1316 void Builtins::Generate_ContinueToCodeStubBuiltin(MacroAssembler* masm) {
1317  Generate_ContinueToBuiltinHelper(masm, false, false);
1318 }
1319 
1320 void Builtins::Generate_ContinueToCodeStubBuiltinWithResult(
1321  MacroAssembler* masm) {
1322  Generate_ContinueToBuiltinHelper(masm, false, true);
1323 }
1324 
1325 void Builtins::Generate_ContinueToJavaScriptBuiltin(MacroAssembler* masm) {
1326  Generate_ContinueToBuiltinHelper(masm, true, false);
1327 }
1328 
1329 void Builtins::Generate_ContinueToJavaScriptBuiltinWithResult(
1330  MacroAssembler* masm) {
1331  Generate_ContinueToBuiltinHelper(masm, true, true);
1332 }
1333 
1334 void Builtins::Generate_NotifyDeoptimized(MacroAssembler* masm) {
1335  {
1336  FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
1337  __ CallRuntime(Runtime::kNotifyDeoptimized);
1338  }
1339 
1340  DCHECK_EQ(kInterpreterAccumulatorRegister.code(), r3.code());
1341  __ LoadP(r3, MemOperand(sp, 0 * kPointerSize));
1342  __ addi(sp, sp, Operand(1 * kPointerSize));
1343  __ Ret();
1344 }
1345 
1346 void Builtins::Generate_InterpreterOnStackReplacement(MacroAssembler* masm) {
1347  // Lookup the function in the JavaScript frame.
1348  __ LoadP(r3, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
1349  __ LoadP(r3, MemOperand(r3, JavaScriptFrameConstants::kFunctionOffset));
1350 
1351  {
1352  FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
1353  // Pass function as argument.
1354  __ push(r3);
1355  __ CallRuntime(Runtime::kCompileForOnStackReplacement);
1356  }
1357 
1358  // If the code object is null, just return to the caller.
1359  Label skip;
1360  __ CmpSmiLiteral(r3, Smi::zero(), r0);
1361  __ bne(&skip);
1362  __ Ret();
1363 
1364  __ bind(&skip);
1365 
1366  // Drop the handler frame that is sitting on top of the actual
1367  // JavaScript frame. This is the case when OSR is triggered from bytecode.
1368  __ LeaveFrame(StackFrame::STUB);
1369 
1370  // Load deoptimization data from the code object.
1371  // <deopt_data> = <code>[#deoptimization_data_offset]
1372  __ LoadP(r4, FieldMemOperand(r3, Code::kDeoptimizationDataOffset));
1373 
1374  {
1375  ConstantPoolUnavailableScope constant_pool_unavailable(masm);
1376  __ addi(r3, r3, Operand(Code::kHeaderSize - kHeapObjectTag)); // Code start
1377 
1378  if (FLAG_enable_embedded_constant_pool) {
1379  __ LoadConstantPoolPointerRegisterFromCodeTargetAddress(r3);
1380  }
1381 
1382  // Load the OSR entrypoint offset from the deoptimization data.
1383  // <osr_offset> = <deopt_data>[#header_size + #osr_pc_offset]
1384  __ LoadP(r4,
1385  FieldMemOperand(r4, FixedArray::OffsetOfElementAt(
1386  DeoptimizationData::kOsrPcOffsetIndex)));
1387  __ SmiUntag(r4);
1388 
1389  // Compute the target address = code start + osr_offset
1390  __ add(r0, r3, r4);
1391 
1392  // And "return" to the OSR entry point of the function.
1393  __ mtlr(r0);
1394  __ blr();
1395  }
1396 }
1397 
1398 // static
1399 void Builtins::Generate_FunctionPrototypeApply(MacroAssembler* masm) {
1400  // ----------- S t a t e -------------
1401  // -- r3 : argc
1402  // -- sp[0] : argArray
1403  // -- sp[4] : thisArg
1404  // -- sp[8] : receiver
1405  // -----------------------------------
1406 
1407  // 1. Load receiver into r4, argArray into r5 (if present), remove all
1408  // arguments from the stack (including the receiver), and push thisArg (if
1409  // present) instead.
1410  {
1411  Label skip;
1412  Register arg_size = r8;
1413  Register new_sp = r6;
1414  Register scratch = r7;
1415  __ ShiftLeftImm(arg_size, r3, Operand(kPointerSizeLog2));
1416  __ add(new_sp, sp, arg_size);
1417  __ LoadRoot(scratch, RootIndex::kUndefinedValue);
1418  __ mr(r5, scratch);
1419  __ LoadP(r4, MemOperand(new_sp, 0)); // receiver
1420  __ cmpi(arg_size, Operand(kPointerSize));
1421  __ blt(&skip);
1422  __ LoadP(scratch, MemOperand(new_sp, 1 * -kPointerSize)); // thisArg
1423  __ beq(&skip);
1424  __ LoadP(r5, MemOperand(new_sp, 2 * -kPointerSize)); // argArray
1425  __ bind(&skip);
1426  __ mr(sp, new_sp);
1427  __ StoreP(scratch, MemOperand(sp, 0));
1428  }
1429 
1430  // ----------- S t a t e -------------
1431  // -- r5 : argArray
1432  // -- r4 : receiver
1433  // -- sp[0] : thisArg
1434  // -----------------------------------
1435 
1436  // 2. We don't need to check explicitly for callable receiver here,
1437  // since that's the first thing the Call/CallWithArrayLike builtins
1438  // will do.
1439 
1440  // 3. Tail call with no arguments if argArray is null or undefined.
1441  Label no_arguments;
1442  __ JumpIfRoot(r5, RootIndex::kNullValue, &no_arguments);
1443  __ JumpIfRoot(r5, RootIndex::kUndefinedValue, &no_arguments);
1444 
1445  // 4a. Apply the receiver to the given argArray.
1446  __ Jump(BUILTIN_CODE(masm->isolate(), CallWithArrayLike),
1447  RelocInfo::CODE_TARGET);
1448 
1449  // 4b. The argArray is either null or undefined, so we tail call without any
1450  // arguments to the receiver.
1451  __ bind(&no_arguments);
1452  {
1453  __ li(r3, Operand::Zero());
1454  __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
1455  }
1456 }
1457 
1458 // static
1459 void Builtins::Generate_FunctionPrototypeCall(MacroAssembler* masm) {
1460  // 1. Make sure we have at least one argument.
1461  // r3: actual number of arguments
1462  {
1463  Label done;
1464  __ cmpi(r3, Operand::Zero());
1465  __ bne(&done);
1466  __ PushRoot(RootIndex::kUndefinedValue);
1467  __ addi(r3, r3, Operand(1));
1468  __ bind(&done);
1469  }
1470 
1471  // 2. Get the callable to call (passed as receiver) from the stack.
1472  // r3: actual number of arguments
1473  __ ShiftLeftImm(r5, r3, Operand(kPointerSizeLog2));
1474  __ LoadPX(r4, MemOperand(sp, r5));
1475 
1476  // 3. Shift arguments and return address one slot down on the stack
1477  // (overwriting the original receiver). Adjust argument count to make
1478  // the original first argument the new receiver.
1479  // r3: actual number of arguments
1480  // r4: callable
1481  {
1482  Label loop;
1483  // Calculate the copy start address (destination). Copy end address is sp.
1484  __ add(r5, sp, r5);
1485 
1486  __ mtctr(r3);
1487  __ bind(&loop);
1488  __ LoadP(ip, MemOperand(r5, -kPointerSize));
1489  __ StoreP(ip, MemOperand(r5));
1490  __ subi(r5, r5, Operand(kPointerSize));
1491  __ bdnz(&loop);
1492  // Adjust the actual number of arguments and remove the top element
1493  // (which is a copy of the last argument).
1494  __ subi(r3, r3, Operand(1));
1495  __ pop();
1496  }
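  // [Illustrative sketch, not generated code] The loop above performs a
  // one-slot downward shift so that the original first argument becomes the
  // new receiver. Over an array image of the stack (index 0 = top of stack,
  // index argc = receiver; Slot = intptr_t stand-in), it behaves like:
  //
  //   void ShiftForCall(Slot* sp, int argc) {
  //     for (int i = argc; i >= 1; --i) sp[i] = sp[i - 1];
  //     // The caller then pops the now-duplicated top slot and uses argc - 1.
  //   }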
1497 
1498  // 4. Call the callable.
1499  __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
1500 }
1501 
1502 void Builtins::Generate_ReflectApply(MacroAssembler* masm) {
1503  // ----------- S t a t e -------------
1504  // -- r3 : argc
1505  // -- sp[0] : argumentsList
1506  // -- sp[4] : thisArgument
1507  // -- sp[8] : target
1508  // -- sp[12] : receiver
1509  // -----------------------------------
1510 
1511  // 1. Load target into r4 (if present), argumentsList into r5 (if present),
1512  // remove all arguments from the stack (including the receiver), and push
1513  // thisArgument (if present) instead.
1514  {
1515  Label skip;
1516  Register arg_size = r8;
1517  Register new_sp = r6;
1518  Register scratch = r7;
1519  __ ShiftLeftImm(arg_size, r3, Operand(kPointerSizeLog2));
1520  __ add(new_sp, sp, arg_size);
1521  __ LoadRoot(r4, RootIndex::kUndefinedValue);
1522  __ mr(scratch, r4);
1523  __ mr(r5, r4);
1524  __ cmpi(arg_size, Operand(kPointerSize));
1525  __ blt(&skip);
1526  __ LoadP(r4, MemOperand(new_sp, 1 * -kPointerSize)); // target
1527  __ beq(&skip);
1528  __ LoadP(scratch, MemOperand(new_sp, 2 * -kPointerSize)); // thisArgument
1529  __ cmpi(arg_size, Operand(2 * kPointerSize));
1530  __ beq(&skip);
1531  __ LoadP(r5, MemOperand(new_sp, 3 * -kPointerSize)); // argumentsList
1532  __ bind(&skip);
1533  __ mr(sp, new_sp);
1534  __ StoreP(scratch, MemOperand(sp, 0));
1535  }
1536 
1537  // ----------- S t a t e -------------
1538  // -- r5 : argumentsList
1539  // -- r4 : target
1540  // -- sp[0] : thisArgument
1541  // -----------------------------------
1542 
1543  // 2. We don't need to check explicitly for callable target here,
1544  // since that's the first thing the Call/CallWithArrayLike builtins
1545  // will do.
1546 
1547  // 3. Apply the target to the given argumentsList.
1548  __ Jump(BUILTIN_CODE(masm->isolate(), CallWithArrayLike),
1549  RelocInfo::CODE_TARGET);
1550 }
1551 
1552 void Builtins::Generate_ReflectConstruct(MacroAssembler* masm) {
1553  // ----------- S t a t e -------------
1554  // -- r3 : argc
1555  // -- sp[0] : new.target (optional)
1556  // -- sp[4] : argumentsList
1557  // -- sp[8] : target
1558  // -- sp[12] : receiver
1559  // -----------------------------------
1560 
1561  // 1. Load target into r4 (if present), argumentsList into r5 (if present),
1562  // new.target into r6 (if present, otherwise use target), remove all
1563  // arguments from the stack (including the receiver), and push thisArgument
1564  // (if present) instead.
1565  {
1566  Label skip;
1567  Register arg_size = r8;
1568  Register new_sp = r7;
1569  __ ShiftLeftImm(arg_size, r3, Operand(kPointerSizeLog2));
1570  __ add(new_sp, sp, arg_size);
1571  __ LoadRoot(r4, RootIndex::kUndefinedValue);
1572  __ mr(r5, r4);
1573  __ mr(r6, r4);
1574  __ StoreP(r4, MemOperand(new_sp, 0)); // receiver (undefined)
1575  __ cmpi(arg_size, Operand(kPointerSize));
1576  __ blt(&skip);
1577  __ LoadP(r4, MemOperand(new_sp, 1 * -kPointerSize)); // target
1578  __ mr(r6, r4); // new.target defaults to target
1579  __ beq(&skip);
1580  __ LoadP(r5, MemOperand(new_sp, 2 * -kPointerSize)); // argumentsList
1581  __ cmpi(arg_size, Operand(2 * kPointerSize));
1582  __ beq(&skip);
1583  __ LoadP(r6, MemOperand(new_sp, 3 * -kPointerSize)); // new.target
1584  __ bind(&skip);
1585  __ mr(sp, new_sp);
1586  }
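  // [Illustrative sketch, not generated code] As in the apply/call cases, the
  // loads above are argc-dependent; the only twist is that new.target
  // defaults to target when it is not supplied. Roughly (Slot = intptr_t
  // stand-in for a tagged value):
  //
  //   struct ReflectConstructArgs { Slot target, args_list, new_target; };
  //   ReflectConstructArgs Decode(const Slot* sp, int argc, Slot undefined) {
  //     ReflectConstructArgs out{undefined, undefined, undefined};
  //     if (argc >= 1) out.target = out.new_target = sp[argc - 1];
  //     if (argc >= 2) out.args_list = sp[argc - 2];
  //     if (argc >= 3) out.new_target = sp[argc - 3];
  //     return out;
  //   }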
1587 
1588  // ----------- S t a t e -------------
1589  // -- r5 : argumentsList
1590  // -- r6 : new.target
1591  // -- r4 : target
1592  // -- sp[0] : receiver (undefined)
1593  // -----------------------------------
1594 
1595  // 2. We don't need to check explicitly for constructor target here,
1596  // since that's the first thing the Construct/ConstructWithArrayLike
1597  // builtins will do.
1598 
1599  // 3. We don't need to check explicitly for constructor new.target here,
1600  // since that's the second thing the Construct/ConstructWithArrayLike
1601  // builtins will do.
1602 
1603  // 4. Construct the target with the given new.target and argumentsList.
1604  __ Jump(BUILTIN_CODE(masm->isolate(), ConstructWithArrayLike),
1605  RelocInfo::CODE_TARGET);
1606 }
1607 
1608 static void EnterArgumentsAdaptorFrame(MacroAssembler* masm) {
1609  __ SmiTag(r3);
1610  __ mov(r7, Operand(StackFrame::TypeToMarker(StackFrame::ARGUMENTS_ADAPTOR)));
1611  __ mflr(r0);
1612  __ push(r0);
1613  if (FLAG_enable_embedded_constant_pool) {
1614  __ Push(fp, kConstantPoolRegister, r7, r4, r3);
1615  } else {
1616  __ Push(fp, r7, r4, r3);
1617  }
1618  __ Push(Smi::zero()); // Padding.
1619  __ addi(fp, sp,
1620  Operand(ArgumentsAdaptorFrameConstants::kFixedFrameSizeFromFp));
1621 }
1622 
1623 static void LeaveArgumentsAdaptorFrame(MacroAssembler* masm) {
1624  // ----------- S t a t e -------------
1625  // -- r3 : result being passed through
1626  // -----------------------------------
1627  // Get the number of arguments passed (as a smi), tear down the frame and
1628  // then tear down the parameters.
1629  __ LoadP(r4, MemOperand(fp, ArgumentsAdaptorFrameConstants::kLengthOffset));
1630  int stack_adjustment = kPointerSize; // adjust for receiver
1631  __ LeaveFrame(StackFrame::ARGUMENTS_ADAPTOR, stack_adjustment);
1632  __ SmiToPtrArrayOffset(r0, r4);
1633  __ add(sp, sp, r0);
1634 }
1635 
1636 // static
1637 void Builtins::Generate_CallOrConstructVarargs(MacroAssembler* masm,
1638  Handle<Code> code) {
1639  // ----------- S t a t e -------------
1640  // -- r4 : target
1641  // -- r3 : number of parameters on the stack (not including the receiver)
1642  // -- r5 : arguments list (a FixedArray)
1643  // -- r7 : len (number of elements to push from args)
1644  // -- r6 : new.target (for [[Construct]])
1645  // -----------------------------------
1646 
1647  Register scratch = ip;
1648 
1649  if (masm->emit_debug_code()) {
1650  // Allow r5 to be a FixedArray, or a FixedDoubleArray if r7 == 0.
1651  Label ok, fail;
1652  __ AssertNotSmi(r5);
1653  __ LoadP(scratch, FieldMemOperand(r5, HeapObject::kMapOffset));
1654  __ LoadHalfWord(scratch,
1655  FieldMemOperand(scratch, Map::kInstanceTypeOffset));
1656  __ cmpi(scratch, Operand(FIXED_ARRAY_TYPE));
1657  __ beq(&ok);
1658  __ cmpi(scratch, Operand(FIXED_DOUBLE_ARRAY_TYPE));
1659  __ bne(&fail);
1660  __ cmpi(r7, Operand::Zero());
1661  __ beq(&ok);
1662  // Fall through.
1663  __ bind(&fail);
1664  __ Abort(AbortReason::kOperandIsNotAFixedArray);
1665 
1666  __ bind(&ok);
1667  }
1668 
1669  // Check for stack overflow.
1670  Label stack_overflow;
1671  Generate_StackOverflowCheck(masm, r7, ip, &stack_overflow);
1672 
1673  // Push arguments onto the stack (thisArgument is already on the stack).
1674  {
1675  Label loop, no_args, skip;
1676  __ cmpi(r7, Operand::Zero());
1677  __ beq(&no_args);
1678  __ addi(r5, r5,
1679  Operand(FixedArray::kHeaderSize - kHeapObjectTag - kPointerSize));
1680  __ mtctr(r7);
1681  __ bind(&loop);
1682  __ LoadPU(ip, MemOperand(r5, kPointerSize));
1683  __ CompareRoot(ip, RootIndex::kTheHoleValue);
1684  __ bne(&skip);
1685  __ LoadRoot(ip, RootIndex::kUndefinedValue);
1686  __ bind(&skip);
1687  __ push(ip);
1688  __ bdnz(&loop);
1689  __ bind(&no_args);
1690  __ add(r3, r3, r7);
1691  }
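  // [Illustrative sketch, not generated code] The push loop above copies the
  // FixedArray elements onto the stack, replacing holes with undefined so the
  // callee never observes the_hole. With std::vector (<vector>) standing in
  // for the stack and Slot = intptr_t:
  //
  //   void PushArguments(std::vector<Slot>* stack, const Slot* elements,
  //                      int len, Slot the_hole, Slot undefined) {
  //     for (int i = 0; i < len; ++i)
  //       stack->push_back(elements[i] == the_hole ? undefined : elements[i]);
  //   }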
1692 
1693  // Tail-call to the actual Call or Construct builtin.
1694  __ Jump(code, RelocInfo::CODE_TARGET);
1695 
1696  __ bind(&stack_overflow);
1697  __ TailCallRuntime(Runtime::kThrowStackOverflow);
1698 }
1699 
1700 // static
1701 void Builtins::Generate_CallOrConstructForwardVarargs(MacroAssembler* masm,
1702  CallOrConstructMode mode,
1703  Handle<Code> code) {
1704  // ----------- S t a t e -------------
1705  // -- r3 : the number of arguments (not including the receiver)
1706  // -- r6 : the new.target (for [[Construct]] calls)
1707  // -- r4 : the target to call (can be any Object)
1708  // -- r5 : start index (to support rest parameters)
1709  // -----------------------------------
1710 
1711  Register scratch = r9;
1712 
1713  if (mode == CallOrConstructMode::kConstruct) {
1714  Label new_target_constructor, new_target_not_constructor;
1715  __ JumpIfSmi(r6, &new_target_not_constructor);
1716  __ LoadP(scratch, FieldMemOperand(r6, HeapObject::kMapOffset));
1717  __ lbz(scratch, FieldMemOperand(scratch, Map::kBitFieldOffset));
1718  __ TestBit(scratch, Map::IsConstructorBit::kShift, r0);
1719  __ bne(&new_target_constructor, cr0);
1720  __ bind(&new_target_not_constructor);
1721  {
1722  FrameScope scope(masm, StackFrame::MANUAL);
1723  __ EnterFrame(StackFrame::INTERNAL);
1724  __ Push(r6);
1725  __ CallRuntime(Runtime::kThrowNotConstructor);
1726  }
1727  __ bind(&new_target_constructor);
1728  }
1729 
1730  // Check if we have an arguments adaptor frame below the function frame.
1731  Label arguments_adaptor, arguments_done;
1732  __ LoadP(r7, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
1733  __ LoadP(ip, MemOperand(r7, CommonFrameConstants::kContextOrFrameTypeOffset));
1734  __ cmpi(ip, Operand(StackFrame::TypeToMarker(StackFrame::ARGUMENTS_ADAPTOR)));
1735  __ beq(&arguments_adaptor);
1736  {
1737  __ LoadP(r8, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
1738  __ LoadP(r8, FieldMemOperand(r8, JSFunction::kSharedFunctionInfoOffset));
1739  __ LoadHalfWord(
1740  r8,
1741  FieldMemOperand(r8, SharedFunctionInfo::kFormalParameterCountOffset));
1742  __ mr(r7, fp);
1743  }
1744  __ b(&arguments_done);
1745  __ bind(&arguments_adaptor);
1746  {
1747  // Load the length from the ArgumentsAdaptorFrame.
1748  __ LoadP(r8, MemOperand(r7, ArgumentsAdaptorFrameConstants::kLengthOffset));
1749  __ SmiUntag(r8);
1750  }
1751  __ bind(&arguments_done);
1752 
1753  Label stack_done, stack_overflow;
1754  __ sub(r8, r8, r5);
1755  __ cmpi(r8, Operand::Zero());
1756  __ ble(&stack_done);
1757  {
1758  // Check for stack overflow.
1759  Generate_StackOverflowCheck(masm, r8, r5, &stack_overflow);
1760 
1761  // Forward the arguments from the caller frame.
1762  {
1763  Label loop;
1764  __ addi(r7, r7, Operand(kPointerSize));
1765  __ add(r3, r3, r8);
1766  __ bind(&loop);
1767  {
1768  __ ShiftLeftImm(ip, r8, Operand(kPointerSizeLog2));
1769  __ LoadPX(ip, MemOperand(r7, ip));
1770  __ push(ip);
1771  __ subi(r8, r8, Operand(1));
1772  __ cmpi(r8, Operand::Zero());
1773  __ bne(&loop);
1774  }
1775  }
1776  }
1777  __ b(&stack_done);
1778  __ bind(&stack_overflow);
1779  __ TailCallRuntime(Runtime::kThrowStackOverflow);
1780  __ bind(&stack_done);
1781 
1782  // Tail-call to the {code} handler.
1783  __ Jump(code, RelocInfo::CODE_TARGET);
1784 }
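// [Illustrative sketch] Net effect of the builtin above when forwarding rest
// parameters: with n arguments in the caller frame (taken either from the
// arguments adaptor frame or from the formal parameter count) and start
// index k, the caller's arguments k..n-1 are pushed again and
//
//   new_argc = argc + max(n - k, 0)
//
// before tail-calling the {code} handler.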
1785 
1786 // static
1787 void Builtins::Generate_CallFunction(MacroAssembler* masm,
1788  ConvertReceiverMode mode) {
1789  // ----------- S t a t e -------------
1790  // -- r3 : the number of arguments (not including the receiver)
1791  // -- r4 : the function to call (checked to be a JSFunction)
1792  // -----------------------------------
1793  __ AssertFunction(r4);
1794 
1795  // See ES6 section 9.2.1 [[Call]] ( thisArgument, argumentsList)
1796  // Check that the function is not a "classConstructor".
1797  Label class_constructor;
1798  __ LoadP(r5, FieldMemOperand(r4, JSFunction::kSharedFunctionInfoOffset));
1799  __ lwz(r6, FieldMemOperand(r5, SharedFunctionInfo::kFlagsOffset));
1800  __ TestBitMask(r6, SharedFunctionInfo::IsClassConstructorBit::kMask, r0);
1801  __ bne(&class_constructor, cr0);
1802 
1803  // Enter the context of the function; ToObject has to run in the function
1804  // context, and we also need to take the global proxy from the function
1805  // context in case of conversion.
1806  __ LoadP(cp, FieldMemOperand(r4, JSFunction::kContextOffset));
1807  // We need to convert the receiver for non-native sloppy mode functions.
1808  Label done_convert;
1809  __ andi(r0, r6,
1810  Operand(SharedFunctionInfo::IsStrictBit::kMask |
1811  SharedFunctionInfo::IsNativeBit::kMask));
1812  __ bne(&done_convert, cr0);
1813  {
1814  // ----------- S t a t e -------------
1815  // -- r3 : the number of arguments (not including the receiver)
1816  // -- r4 : the function to call (checked to be a JSFunction)
1817  // -- r5 : the shared function info.
1818  // -- cp : the function context.
1819  // -----------------------------------
1820 
1821  if (mode == ConvertReceiverMode::kNullOrUndefined) {
1822  // Patch receiver to global proxy.
1823  __ LoadGlobalProxy(r6);
1824  } else {
1825  Label convert_to_object, convert_receiver;
1826  __ ShiftLeftImm(r6, r3, Operand(kPointerSizeLog2));
1827  __ LoadPX(r6, MemOperand(sp, r6));
1828  __ JumpIfSmi(r6, &convert_to_object);
1829  STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
1830  __ CompareObjectType(r6, r7, r7, FIRST_JS_RECEIVER_TYPE);
1831  __ bge(&done_convert);
1832  if (mode != ConvertReceiverMode::kNotNullOrUndefined) {
1833  Label convert_global_proxy;
1834  __ JumpIfRoot(r6, RootIndex::kUndefinedValue, &convert_global_proxy);
1835  __ JumpIfNotRoot(r6, RootIndex::kNullValue, &convert_to_object);
1836  __ bind(&convert_global_proxy);
1837  {
1838  // Patch receiver to global proxy.
1839  __ LoadGlobalProxy(r6);
1840  }
1841  __ b(&convert_receiver);
1842  }
1843  __ bind(&convert_to_object);
1844  {
1845  // Convert receiver using ToObject.
1846  // TODO(bmeurer): Inline the allocation here to avoid building the frame
1847  // in the fast case? (fall back to AllocateInNewSpace?)
1848  FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
1849  __ SmiTag(r3);
1850  __ Push(r3, r4);
1851  __ mr(r3, r6);
1852  __ Push(cp);
1853  __ Call(BUILTIN_CODE(masm->isolate(), ToObject),
1854  RelocInfo::CODE_TARGET);
1855  __ Pop(cp);
1856  __ mr(r6, r3);
1857  __ Pop(r3, r4);
1858  __ SmiUntag(r3);
1859  }
1860  __ LoadP(r5, FieldMemOperand(r4, JSFunction::kSharedFunctionInfoOffset));
1861  __ bind(&convert_receiver);
1862  }
1863  __ ShiftLeftImm(r7, r3, Operand(kPointerSizeLog2));
1864  __ StorePX(r6, MemOperand(sp, r7));
1865  }
1866  __ bind(&done_convert);
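  // [Illustrative sketch, not generated code] The conversion above implements
  // the sloppy-mode receiver coercion: undefined/null become the global
  // proxy, JSReceivers are kept as-is, and anything else goes through
  // ToObject. As ordinary control flow (to_object stands for the result of
  // the ToObject builtin call; Slot = intptr_t stand-in):
  //
  //   Slot CoerceSloppyReceiver(Slot receiver, bool is_null_or_undefined,
  //                             bool is_js_receiver, Slot global_proxy,
  //                             Slot to_object) {
  //     if (is_null_or_undefined) return global_proxy;
  //     if (is_js_receiver) return receiver;
  //     return to_object;
  //   }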
1867 
1868  // ----------- S t a t e -------------
1869  // -- r3 : the number of arguments (not including the receiver)
1870  // -- r4 : the function to call (checked to be a JSFunction)
1871  // -- r5 : the shared function info.
1872  // -- cp : the function context.
1873  // -----------------------------------
1874 
1875  __ LoadHalfWord(
1876  r5, FieldMemOperand(r5, SharedFunctionInfo::kFormalParameterCountOffset));
1877  ParameterCount actual(r3);
1878  ParameterCount expected(r5);
1879  __ InvokeFunctionCode(r4, no_reg, expected, actual, JUMP_FUNCTION);
1880 
 1881  // The function is a "classConstructor", so we need to raise an exception.
1882  __ bind(&class_constructor);
1883  {
1884  FrameAndConstantPoolScope frame(masm, StackFrame::INTERNAL);
1885  __ push(r4);
1886  __ CallRuntime(Runtime::kThrowConstructorNonCallableError);
1887  }
1888 }
1889 
1890 namespace {
1891 
1892 void Generate_PushBoundArguments(MacroAssembler* masm) {
1893  // ----------- S t a t e -------------
1894  // -- r3 : the number of arguments (not including the receiver)
1895  // -- r4 : target (checked to be a JSBoundFunction)
1896  // -- r6 : new.target (only in case of [[Construct]])
1897  // -----------------------------------
1898 
1899  // Load [[BoundArguments]] into r5 and length of that into r7.
1900  Label no_bound_arguments;
1901  __ LoadP(r5, FieldMemOperand(r4, JSBoundFunction::kBoundArgumentsOffset));
1902  __ LoadP(r7, FieldMemOperand(r5, FixedArray::kLengthOffset));
1903  __ SmiUntag(r7, SetRC);
1904  __ beq(&no_bound_arguments, cr0);
1905  {
1906  // ----------- S t a t e -------------
1907  // -- r3 : the number of arguments (not including the receiver)
1908  // -- r4 : target (checked to be a JSBoundFunction)
1909  // -- r5 : the [[BoundArguments]] (implemented as FixedArray)
1910  // -- r6 : new.target (only in case of [[Construct]])
1911  // -- r7 : the number of [[BoundArguments]]
1912  // -----------------------------------
1913 
1914  // Reserve stack space for the [[BoundArguments]].
1915  {
1916  Label done;
1917  __ mr(r9, sp); // preserve previous stack pointer
1918  __ ShiftLeftImm(r10, r7, Operand(kPointerSizeLog2));
1919  __ sub(sp, sp, r10);
1920  // Check the stack for overflow. We are not trying to catch interruptions
1921  // (i.e. debug break and preemption) here, so check the "real stack
1922  // limit".
1923  __ CompareRoot(sp, RootIndex::kRealStackLimit);
1924  __ bgt(&done); // Signed comparison.
1925  // Restore the stack pointer.
1926  __ mr(sp, r9);
1927  {
1928  FrameScope scope(masm, StackFrame::MANUAL);
1929  __ EnterFrame(StackFrame::INTERNAL);
1930  __ CallRuntime(Runtime::kThrowStackOverflow);
1931  }
1932  __ bind(&done);
1933  }
1934 
1935  // Relocate arguments down the stack.
1936  // -- r3 : the number of arguments (not including the receiver)
1937  // -- r9 : the previous stack pointer
1938  // -- r10: the size of the [[BoundArguments]]
1939  {
1940  Label skip, loop;
1941  __ li(r8, Operand::Zero());
1942  __ cmpi(r3, Operand::Zero());
1943  __ beq(&skip);
1944  __ mtctr(r3);
1945  __ bind(&loop);
1946  __ LoadPX(r0, MemOperand(r9, r8));
1947  __ StorePX(r0, MemOperand(sp, r8));
1948  __ addi(r8, r8, Operand(kPointerSize));
1949  __ bdnz(&loop);
1950  __ bind(&skip);
1951  }
1952 
1953  // Copy [[BoundArguments]] to the stack (below the arguments).
1954  {
1955  Label loop;
1956  __ addi(r5, r5, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
1957  __ add(r5, r5, r10);
1958  __ mtctr(r7);
1959  __ bind(&loop);
1960  __ LoadPU(r0, MemOperand(r5, -kPointerSize));
1961  __ StorePX(r0, MemOperand(sp, r8));
1962  __ addi(r8, r8, Operand(kPointerSize));
1963  __ bdnz(&loop);
1964  __ add(r3, r3, r7);
1965  }
1966  }
1967  __ bind(&no_bound_arguments);
1968 }
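// [Illustrative sketch, not generated code] Net effect of the relocation and
// copy above on the argument list, with std::vector (<vector>) standing in
// for the stack and Slot = intptr_t standing in for a tagged value:
//
//   std::vector<Slot> PrependBoundArgs(const std::vector<Slot>& bound,
//                                      const std::vector<Slot>& args) {
//     std::vector<Slot> out(bound);                     // [[BoundArguments]] first
//     out.insert(out.end(), args.begin(), args.end());  // then the call's own args
//     return out;  // the new argc is bound.size() + args.size()
//   }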
1969 
1970 } // namespace
1971 
1972 // static
1973 void Builtins::Generate_CallBoundFunctionImpl(MacroAssembler* masm) {
1974  // ----------- S t a t e -------------
1975  // -- r3 : the number of arguments (not including the receiver)
1976  // -- r4 : the function to call (checked to be a JSBoundFunction)
1977  // -----------------------------------
1978  __ AssertBoundFunction(r4);
1979 
1980  // Patch the receiver to [[BoundThis]].
1981  __ LoadP(ip, FieldMemOperand(r4, JSBoundFunction::kBoundThisOffset));
1982  __ ShiftLeftImm(r0, r3, Operand(kPointerSizeLog2));
1983  __ StorePX(ip, MemOperand(sp, r0));
1984 
1985  // Push the [[BoundArguments]] onto the stack.
1986  Generate_PushBoundArguments(masm);
1987 
1988  // Call the [[BoundTargetFunction]] via the Call builtin.
1989  __ LoadP(r4,
1990  FieldMemOperand(r4, JSBoundFunction::kBoundTargetFunctionOffset));
1991  __ Jump(BUILTIN_CODE(masm->isolate(), Call_ReceiverIsAny),
1992  RelocInfo::CODE_TARGET);
1993 }
1994 
1995 // static
1996 void Builtins::Generate_Call(MacroAssembler* masm, ConvertReceiverMode mode) {
1997  // ----------- S t a t e -------------
1998  // -- r3 : the number of arguments (not including the receiver)
1999  // -- r4 : the target to call (can be any Object).
2000  // -----------------------------------
2001 
2002  Label non_callable, non_function, non_smi;
2003  __ JumpIfSmi(r4, &non_callable);
2004  __ bind(&non_smi);
2005  __ CompareObjectType(r4, r7, r8, JS_FUNCTION_TYPE);
2006  __ Jump(masm->isolate()->builtins()->CallFunction(mode),
2007  RelocInfo::CODE_TARGET, eq);
2008  __ cmpi(r8, Operand(JS_BOUND_FUNCTION_TYPE));
2009  __ Jump(BUILTIN_CODE(masm->isolate(), CallBoundFunction),
2010  RelocInfo::CODE_TARGET, eq);
2011 
2012  // Check if target has a [[Call]] internal method.
2013  __ lbz(r7, FieldMemOperand(r7, Map::kBitFieldOffset));
2014  __ TestBit(r7, Map::IsCallableBit::kShift, r0);
2015  __ beq(&non_callable, cr0);
2016 
 2017  // Check if the target is a proxy; if so, call the CallProxy builtin.
2018  __ cmpi(r8, Operand(JS_PROXY_TYPE));
2019  __ bne(&non_function);
2020  __ Jump(BUILTIN_CODE(masm->isolate(), CallProxy), RelocInfo::CODE_TARGET);
2021 
2022  // 2. Call to something else, which might have a [[Call]] internal method (if
 2023  // not, we raise an exception).
2024  __ bind(&non_function);
 2025  // Overwrite the original receiver with the (original) target.
2026  __ ShiftLeftImm(r8, r3, Operand(kPointerSizeLog2));
2027  __ StorePX(r4, MemOperand(sp, r8));
2028  // Let the "call_as_function_delegate" take care of the rest.
2029  __ LoadNativeContextSlot(Context::CALL_AS_FUNCTION_DELEGATE_INDEX, r4);
2030  __ Jump(masm->isolate()->builtins()->CallFunction(
2031  ConvertReceiverMode::kNotNullOrUndefined),
2032  RelocInfo::CODE_TARGET);
2033 
2034  // 3. Call to something that is not callable.
2035  __ bind(&non_callable);
2036  {
2037  FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
2038  __ Push(r4);
2039  __ CallRuntime(Runtime::kThrowCalledNonCallable);
2040  }
2041 }
2042 
2043 // static
2044 void Builtins::Generate_ConstructFunction(MacroAssembler* masm) {
2045  // ----------- S t a t e -------------
2046  // -- r3 : the number of arguments (not including the receiver)
2047  // -- r4 : the constructor to call (checked to be a JSFunction)
2048  // -- r6 : the new target (checked to be a constructor)
2049  // -----------------------------------
2050  __ AssertConstructor(r4);
2051  __ AssertFunction(r4);
2052 
 2053  // The calling convention for function-specific ConstructStubs requires
 2054  // r5 to contain either an AllocationSite or undefined.
2055  __ LoadRoot(r5, RootIndex::kUndefinedValue);
2056 
2057  Label call_generic_stub;
2058 
2059  // Jump to JSBuiltinsConstructStub or JSConstructStubGeneric.
2060  __ LoadP(r7, FieldMemOperand(r4, JSFunction::kSharedFunctionInfoOffset));
2061  __ lwz(r7, FieldMemOperand(r7, SharedFunctionInfo::kFlagsOffset));
2062  __ mov(ip, Operand(SharedFunctionInfo::ConstructAsBuiltinBit::kMask));
2063  __ and_(r7, r7, ip, SetRC);
2064  __ beq(&call_generic_stub, cr0);
2065 
2066  __ Jump(BUILTIN_CODE(masm->isolate(), JSBuiltinsConstructStub),
2067  RelocInfo::CODE_TARGET);
2068 
2069  __ bind(&call_generic_stub);
2070  __ Jump(BUILTIN_CODE(masm->isolate(), JSConstructStubGeneric),
2071  RelocInfo::CODE_TARGET);
2072 }
2073 
2074 // static
2075 void Builtins::Generate_ConstructBoundFunction(MacroAssembler* masm) {
2076  // ----------- S t a t e -------------
2077  // -- r3 : the number of arguments (not including the receiver)
2078  // -- r4 : the function to call (checked to be a JSBoundFunction)
2079  // -- r6 : the new target (checked to be a constructor)
2080  // -----------------------------------
2081  __ AssertConstructor(r4);
2082  __ AssertBoundFunction(r4);
2083 
2084  // Push the [[BoundArguments]] onto the stack.
2085  Generate_PushBoundArguments(masm);
2086 
2087  // Patch new.target to [[BoundTargetFunction]] if new.target equals target.
2088  Label skip;
2089  __ cmp(r4, r6);
2090  __ bne(&skip);
2091  __ LoadP(r6,
2092  FieldMemOperand(r4, JSBoundFunction::kBoundTargetFunctionOffset));
2093  __ bind(&skip);
2094 
2095  // Construct the [[BoundTargetFunction]] via the Construct builtin.
2096  __ LoadP(r4,
2097  FieldMemOperand(r4, JSBoundFunction::kBoundTargetFunctionOffset));
2098  __ Jump(BUILTIN_CODE(masm->isolate(), Construct), RelocInfo::CODE_TARGET);
2099 }
2100 
2101 // static
2102 void Builtins::Generate_Construct(MacroAssembler* masm) {
2103  // ----------- S t a t e -------------
2104  // -- r3 : the number of arguments (not including the receiver)
2105  // -- r4 : the constructor to call (can be any Object)
2106  // -- r6 : the new target (either the same as the constructor or
2107  // the JSFunction on which new was invoked initially)
2108  // -----------------------------------
2109 
2110  // Check if target is a Smi.
2111  Label non_constructor, non_proxy;
2112  __ JumpIfSmi(r4, &non_constructor);
2113 
2114  // Check if target has a [[Construct]] internal method.
2115  __ LoadP(r7, FieldMemOperand(r4, HeapObject::kMapOffset));
2116  __ lbz(r5, FieldMemOperand(r7, Map::kBitFieldOffset));
2117  __ TestBit(r5, Map::IsConstructorBit::kShift, r0);
2118  __ beq(&non_constructor, cr0);
2119 
2120  // Dispatch based on instance type.
2121  __ CompareInstanceType(r7, r8, JS_FUNCTION_TYPE);
2122  __ Jump(BUILTIN_CODE(masm->isolate(), ConstructFunction),
2123  RelocInfo::CODE_TARGET, eq);
2124 
2125  // Only dispatch to bound functions after checking whether they are
2126  // constructors.
2127  __ cmpi(r8, Operand(JS_BOUND_FUNCTION_TYPE));
2128  __ Jump(BUILTIN_CODE(masm->isolate(), ConstructBoundFunction),
2129  RelocInfo::CODE_TARGET, eq);
2130 
2131  // Only dispatch to proxies after checking whether they are constructors.
2132  __ cmpi(r8, Operand(JS_PROXY_TYPE));
2133  __ bne(&non_proxy);
2134  __ Jump(BUILTIN_CODE(masm->isolate(), ConstructProxy),
2135  RelocInfo::CODE_TARGET);
2136 
2137  // Called Construct on an exotic Object with a [[Construct]] internal method.
2138  __ bind(&non_proxy);
2139  {
2140  // Overwrite the original receiver with the (original) target.
2141  __ ShiftLeftImm(r8, r3, Operand(kPointerSizeLog2));
2142  __ StorePX(r4, MemOperand(sp, r8));
2143  // Let the "call_as_constructor_delegate" take care of the rest.
2144  __ LoadNativeContextSlot(Context::CALL_AS_CONSTRUCTOR_DELEGATE_INDEX, r4);
2145  __ Jump(masm->isolate()->builtins()->CallFunction(),
2146  RelocInfo::CODE_TARGET);
2147  }
2148 
2149  // Called Construct on an Object that doesn't have a [[Construct]] internal
2150  // method.
2151  __ bind(&non_constructor);
2152  __ Jump(BUILTIN_CODE(masm->isolate(), ConstructedNonConstructable),
2153  RelocInfo::CODE_TARGET);
2154 }
2155 
2156 void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
2157  // ----------- S t a t e -------------
2158  // -- r3 : actual number of arguments
2159  // -- r4 : function (passed through to callee)
2160  // -- r5 : expected number of arguments
2161  // -- r6 : new target (passed through to callee)
2162  // -----------------------------------
2163 
2164  Label invoke, dont_adapt_arguments, stack_overflow;
2165 
2166  Label enough, too_few;
2167  __ cmpli(r5, Operand(SharedFunctionInfo::kDontAdaptArgumentsSentinel));
2168  __ beq(&dont_adapt_arguments);
2169  __ cmp(r3, r5);
2170  __ blt(&too_few);
2171 
2172  { // Enough parameters: actual >= expected
2173  __ bind(&enough);
2174  EnterArgumentsAdaptorFrame(masm);
2175  Generate_StackOverflowCheck(masm, r5, r8, &stack_overflow);
2176 
2177  // Calculate copy start address into r3 and copy end address into r7.
2178  // r3: actual number of arguments as a smi
2179  // r4: function
2180  // r5: expected number of arguments
2181  // r6: new target (passed through to callee)
2182  __ SmiToPtrArrayOffset(r3, r3);
2183  __ add(r3, r3, fp);
2184  // adjust for return address and receiver
2185  __ addi(r3, r3, Operand(2 * kPointerSize));
2186  __ ShiftLeftImm(r7, r5, Operand(kPointerSizeLog2));
2187  __ sub(r7, r3, r7);
2188 
2189  // Copy the arguments (including the receiver) to the new stack frame.
2190  // r3: copy start address
2191  // r4: function
2192  // r5: expected number of arguments
2193  // r6: new target (passed through to callee)
2194  // r7: copy end address
2195 
2196  Label copy;
2197  __ bind(&copy);
2198  __ LoadP(r0, MemOperand(r3, 0));
2199  __ push(r0);
2200  __ cmp(r3, r7); // Compare before moving to next argument.
2201  __ subi(r3, r3, Operand(kPointerSize));
2202  __ bne(&copy);
2203 
2204  __ b(&invoke);
2205  }
2206 
2207  { // Too few parameters: Actual < expected
2208  __ bind(&too_few);
2209 
2210  EnterArgumentsAdaptorFrame(masm);
2211  Generate_StackOverflowCheck(masm, r5, r8, &stack_overflow);
2212 
 2213  // Calculate the copy start address into r3; the copy end address is fp.
2214  // r3: actual number of arguments as a smi
2215  // r4: function
2216  // r5: expected number of arguments
2217  // r6: new target (passed through to callee)
2218  __ SmiToPtrArrayOffset(r3, r3);
2219  __ add(r3, r3, fp);
2220 
2221  // Copy the arguments (including the receiver) to the new stack frame.
2222  // r3: copy start address
2223  // r4: function
2224  // r5: expected number of arguments
2225  // r6: new target (passed through to callee)
2226  Label copy;
2227  __ bind(&copy);
2228  // Adjust load for return address and receiver.
2229  __ LoadP(r0, MemOperand(r3, 2 * kPointerSize));
2230  __ push(r0);
2231  __ cmp(r3, fp); // Compare before moving to next argument.
2232  __ subi(r3, r3, Operand(kPointerSize));
2233  __ bne(&copy);
2234 
2235  // Fill the remaining expected arguments with undefined.
2236  // r4: function
2237  // r5: expected number of arguments
2238  // r6: new target (passed through to callee)
2239  __ LoadRoot(r0, RootIndex::kUndefinedValue);
2240  __ ShiftLeftImm(r7, r5, Operand(kPointerSizeLog2));
2241  __ sub(r7, fp, r7);
2242  // Adjust for frame.
2243  __ subi(r7, r7,
2244  Operand(ArgumentsAdaptorFrameConstants::kFixedFrameSizeFromFp +
2245  kPointerSize));
2246 
2247  Label fill;
2248  __ bind(&fill);
2249  __ push(r0);
2250  __ cmp(sp, r7);
2251  __ bne(&fill);
2252  }
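  // [Illustrative sketch, not generated code] Both paths above implement the
  // same adaptation rule: keep the first min(actual, expected) arguments and
  // pad with undefined up to the expected count (the receiver is copied
  // separately). Roughly, with std::vector (<vector>) and Slot = intptr_t:
  //
  //   std::vector<Slot> AdaptArguments(const std::vector<Slot>& actual,
  //                                    size_t expected, Slot undefined) {
  //     size_t keep = actual.size() < expected ? actual.size() : expected;
  //     std::vector<Slot> adapted(actual.begin(), actual.begin() + keep);
  //     adapted.resize(expected, undefined);  // pad missing args with undefined
  //     return adapted;
  //   }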
2253 
2254  // Call the entry point.
2255  __ bind(&invoke);
2256  __ mr(r3, r5);
2257  // r3 : expected number of arguments
2258  // r4 : function (passed through to callee)
2259  // r6 : new target (passed through to callee)
2260  static_assert(kJavaScriptCallCodeStartRegister == r5, "ABI mismatch");
2261  __ LoadP(r5, FieldMemOperand(r4, JSFunction::kCodeOffset));
2262  __ addi(r5, r5, Operand(Code::kHeaderSize - kHeapObjectTag));
2263  __ CallJSEntry(r5);
2264 
2265  // Store offset of return address for deoptimizer.
2266  masm->isolate()->heap()->SetArgumentsAdaptorDeoptPCOffset(masm->pc_offset());
2267 
2268  // Exit frame and return.
2269  LeaveArgumentsAdaptorFrame(masm);
2270  __ blr();
2271 
2272  // -------------------------------------------
 2273  // Don't adapt arguments.
2274  // -------------------------------------------
2275  __ bind(&dont_adapt_arguments);
2276  static_assert(kJavaScriptCallCodeStartRegister == r5, "ABI mismatch");
2277  __ LoadP(r5, FieldMemOperand(r4, JSFunction::kCodeOffset));
2278  __ addi(r5, r5, Operand(Code::kHeaderSize - kHeapObjectTag));
2279  __ JumpToJSEntry(r5);
2280 
2281  __ bind(&stack_overflow);
2282  {
2283  FrameScope frame(masm, StackFrame::MANUAL);
2284  __ CallRuntime(Runtime::kThrowStackOverflow);
2285  __ bkpt(0);
2286  }
2287 }
2288 
2289 void Builtins::Generate_WasmCompileLazy(MacroAssembler* masm) {
2290  // The function index was put in a register by the jump table trampoline.
2291  // Convert to Smi for the runtime call.
2292  __ SmiTag(kWasmCompileLazyFuncIndexRegister,
2293  kWasmCompileLazyFuncIndexRegister);
2294  {
2295  HardAbortScope hard_abort(masm); // Avoid calls to Abort.
2296  FrameAndConstantPoolScope scope(masm, StackFrame::WASM_COMPILE_LAZY);
2297 
2298  // Save all parameter registers (see wasm-linkage.cc). They might be
2299  // overwritten in the runtime call below. We don't have any callee-saved
2300  // registers in wasm, so no need to store anything else.
2301  constexpr RegList gp_regs =
2302  Register::ListOf<r3, r4, r5, r6, r7, r8, r9, r10>();
2303  constexpr RegList fp_regs =
2304  DoubleRegister::ListOf<d1, d2, d3, d4, d5, d6, d7, d8>();
2305  __ MultiPush(gp_regs);
2306  __ MultiPushDoubles(fp_regs);
2307 
2308  // Pass instance and function index as explicit arguments to the runtime
2309  // function.
2310  __ Push(kWasmInstanceRegister, kWasmCompileLazyFuncIndexRegister);
2311  // Load the correct CEntry builtin from the instance object.
2312  __ LoadP(r5, FieldMemOperand(kWasmInstanceRegister,
2313  WasmInstanceObject::kCEntryStubOffset));
2314  // Initialize the JavaScript context with 0. CEntry will use it to
2315  // set the current context on the isolate.
2316  __ LoadSmiLiteral(cp, Smi::zero());
2317  __ CallRuntimeWithCEntry(Runtime::kWasmCompileLazy, r5);
2318  // The entrypoint address is the return value.
2319  __ mr(r11, kReturnRegister0);
2320 
2321  // Restore registers.
2322  __ MultiPopDoubles(fp_regs);
2323  __ MultiPop(gp_regs);
2324  }
2325  // Finally, jump to the entrypoint.
2326  __ Jump(r11);
2327 }
2328 
2329 void Builtins::Generate_CEntry(MacroAssembler* masm, int result_size,
2330  SaveFPRegsMode save_doubles, ArgvMode argv_mode,
2331  bool builtin_exit_frame) {
2332  // Called from JavaScript; parameters are on stack as if calling JS function.
2333  // r3: number of arguments including receiver
2334  // r4: pointer to builtin function
2335  // fp: frame pointer (restored after C call)
2336  // sp: stack pointer (restored as callee's sp after C call)
2337  // cp: current context (C callee-saved)
2338  //
2339  // If argv_mode == kArgvInRegister:
2340  // r5: pointer to the first argument
2341 
2342  __ mr(r15, r4);
2343 
2344  if (argv_mode == kArgvInRegister) {
2345  // Move argv into the correct register.
2346  __ mr(r4, r5);
2347  } else {
2348  // Compute the argv pointer.
2349  __ ShiftLeftImm(r4, r3, Operand(kPointerSizeLog2));
2350  __ add(r4, r4, sp);
2351  __ subi(r4, r4, Operand(kPointerSize));
2352  }
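  // [Illustrative note] For the on-stack case the computed argv is the
  // address of the deepest stack slot (the first argument, with argc counting
  // the receiver):
  //
  //   argv = sp + (argc - 1) * kPointerSize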
2353 
2354  // Enter the exit frame that transitions from JavaScript to C++.
2355  FrameScope scope(masm, StackFrame::MANUAL);
2356 
2357  // Need at least one extra slot for return address location.
2358  int arg_stack_space = 1;
2359 
2360  // Pass buffer for return value on stack if necessary
2361  bool needs_return_buffer =
2362  (result_size == 2 && !ABI_RETURNS_OBJECT_PAIRS_IN_REGS);
2363  if (needs_return_buffer) {
2364  arg_stack_space += result_size;
2365  }
2366 
2367  __ EnterExitFrame(
2368  save_doubles, arg_stack_space,
2369  builtin_exit_frame ? StackFrame::BUILTIN_EXIT : StackFrame::EXIT);
2370 
 2371  // Store a copy of argc in a callee-saved register for later.
2372  __ mr(r14, r3);
2373 
2374  // r3, r14: number of arguments including receiver (C callee-saved)
2375  // r4: pointer to the first argument
2376  // r15: pointer to builtin function (C callee-saved)
2377 
2378  // Result returned in registers or stack, depending on result size and ABI.
2379 
2380  Register isolate_reg = r5;
2381  if (needs_return_buffer) {
 2382  // The return value is a non-scalar value.
 2383  // Use the frame storage reserved by the calling function to pass the
 2384  // return buffer as an implicit first argument.
2385  __ mr(r5, r4);
2386  __ mr(r4, r3);
2387  __ addi(r3, sp, Operand((kStackFrameExtraParamSlot + 1) * kPointerSize));
2388  isolate_reg = r6;
2389  }
2390 
2391  // Call C built-in.
2392  __ Move(isolate_reg, ExternalReference::isolate_address(masm->isolate()));
2393 
2394  Register target = r15;
2395  if (ABI_USES_FUNCTION_DESCRIPTORS) {
2396  // AIX/PPC64BE Linux use a function descriptor.
2397  __ LoadP(ToRegister(ABI_TOC_REGISTER), MemOperand(r15, kPointerSize));
2398  __ LoadP(ip, MemOperand(r15, 0)); // Instruction address
2399  target = ip;
2400  } else if (ABI_CALL_VIA_IP) {
2401  __ Move(ip, r15);
2402  target = ip;
2403  }
2404 
2405  // To let the GC traverse the return address of the exit frames, we need to
2406  // know where the return address is. The CEntryStub is unmovable, so
2407  // we can store the address on the stack to be able to find it again and
2408  // we never have to restore it, because it will not change.
2409  Label start_call;
2410  constexpr int after_call_offset = 5 * kInstrSize;
2411  DCHECK_NE(r7, target);
2412  __ LoadPC(r7);
2413  __ bind(&start_call);
2414  __ addi(r7, r7, Operand(after_call_offset));
2415  __ StoreP(r7, MemOperand(sp, kStackFrameExtraParamSlot * kPointerSize));
2416  __ Call(target);
2417  DCHECK_EQ(after_call_offset - kInstrSize,
2418  __ SizeOfCodeGeneratedSince(&start_call));
2419 
2420  // If return value is on the stack, pop it to registers.
2421  if (needs_return_buffer) {
2422  __ LoadP(r4, MemOperand(r3, kPointerSize));
2423  __ LoadP(r3, MemOperand(r3));
2424  }
2425 
2426  // Check result for exception sentinel.
2427  Label exception_returned;
2428  __ CompareRoot(r3, RootIndex::kException);
2429  __ beq(&exception_returned);
2430 
2431  // Check that there is no pending exception, otherwise we
2432  // should have returned the exception sentinel.
2433  if (FLAG_debug_code) {
2434  Label okay;
2435  ExternalReference pending_exception_address = ExternalReference::Create(
2436  IsolateAddressId::kPendingExceptionAddress, masm->isolate());
2437 
2438  __ Move(r6, pending_exception_address);
2439  __ LoadP(r6, MemOperand(r6));
2440  __ CompareRoot(r6, RootIndex::kTheHoleValue);
 2441  // Cannot use a check here, as it attempts to generate a call into the runtime.
2442  __ beq(&okay);
2443  __ stop("Unexpected pending exception");
2444  __ bind(&okay);
2445  }
2446 
2447  // Exit C frame and return.
2448  // r3:r4: result
2449  // sp: stack pointer
2450  // fp: frame pointer
2451  Register argc = argv_mode == kArgvInRegister
2452  // We don't want to pop arguments so set argc to no_reg.
2453  ? no_reg
2454  // r14: still holds argc (callee-saved).
2455  : r14;
2456  __ LeaveExitFrame(save_doubles, argc);
2457  __ blr();
2458 
2459  // Handling of exception.
2460  __ bind(&exception_returned);
2461 
2462  ExternalReference pending_handler_context_address = ExternalReference::Create(
2463  IsolateAddressId::kPendingHandlerContextAddress, masm->isolate());
2464  ExternalReference pending_handler_entrypoint_address =
2465  ExternalReference::Create(
2466  IsolateAddressId::kPendingHandlerEntrypointAddress, masm->isolate());
2467  ExternalReference pending_handler_constant_pool_address =
2468  ExternalReference::Create(
2469  IsolateAddressId::kPendingHandlerConstantPoolAddress,
2470  masm->isolate());
2471  ExternalReference pending_handler_fp_address = ExternalReference::Create(
2472  IsolateAddressId::kPendingHandlerFPAddress, masm->isolate());
2473  ExternalReference pending_handler_sp_address = ExternalReference::Create(
2474  IsolateAddressId::kPendingHandlerSPAddress, masm->isolate());
2475 
2476  // Ask the runtime for help to determine the handler. This will set r3 to
 2477  // contain the current pending exception; don't clobber it.
2478  ExternalReference find_handler =
2479  ExternalReference::Create(Runtime::kUnwindAndFindExceptionHandler);
2480  {
2481  FrameScope scope(masm, StackFrame::MANUAL);
2482  __ PrepareCallCFunction(3, 0, r3);
2483  __ li(r3, Operand::Zero());
2484  __ li(r4, Operand::Zero());
2485  __ Move(r5, ExternalReference::isolate_address(masm->isolate()));
2486  __ CallCFunction(find_handler, 3);
2487  }
2488 
2489  // Retrieve the handler context, SP and FP.
2490  __ Move(cp, pending_handler_context_address);
2491  __ LoadP(cp, MemOperand(cp));
2492  __ Move(sp, pending_handler_sp_address);
2493  __ LoadP(sp, MemOperand(sp));
2494  __ Move(fp, pending_handler_fp_address);
2495  __ LoadP(fp, MemOperand(fp));
2496 
2497  // If the handler is a JS frame, restore the context to the frame. Note that
 2498  // the context (cp) will be 0 for non-JS frames.
2499  Label skip;
2500  __ cmpi(cp, Operand::Zero());
2501  __ beq(&skip);
2502  __ StoreP(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
2503  __ bind(&skip);
2504 
2505  // Reset the masking register. This is done independent of the underlying
2506  // feature flag {FLAG_untrusted_code_mitigations} to make the snapshot work
2507  // with both configurations. It is safe to always do this, because the
2508  // underlying register is caller-saved and can be arbitrarily clobbered.
2509  __ ResetSpeculationPoisonRegister();
2510 
2511  // Compute the handler entry address and jump to it.
2512  ConstantPoolUnavailableScope constant_pool_unavailable(masm);
2513  __ Move(ip, pending_handler_entrypoint_address);
2514  __ LoadP(ip, MemOperand(ip));
2515  if (FLAG_enable_embedded_constant_pool) {
2516  __ Move(kConstantPoolRegister, pending_handler_constant_pool_address);
2517  __ LoadP(kConstantPoolRegister, MemOperand(kConstantPoolRegister));
2518  }
2519  __ Jump(ip);
2520 }
2521 
2522 void Builtins::Generate_DoubleToI(MacroAssembler* masm) {
2523  Label out_of_range, only_low, negate, done, fastpath_done;
2524  Register result_reg = r3;
2525 
2526  HardAbortScope hard_abort(masm); // Avoid calls to Abort.
2527 
2528  // Immediate values for this stub fit in instructions, so it's safe to use ip.
2529  Register scratch = GetRegisterThatIsNotOneOf(result_reg);
2530  Register scratch_low = GetRegisterThatIsNotOneOf(result_reg, scratch);
2531  Register scratch_high =
2532  GetRegisterThatIsNotOneOf(result_reg, scratch, scratch_low);
2533  DoubleRegister double_scratch = kScratchDoubleReg;
2534 
2535  __ Push(result_reg, scratch);
2536  // Account for saved regs.
2537  int argument_offset = 2 * kPointerSize;
2538 
2539  // Load double input.
2540  __ lfd(double_scratch, MemOperand(sp, argument_offset));
2541 
 2542  // Do the fast-path conversion from double to int.
2543  __ ConvertDoubleToInt64(double_scratch,
2544 #if !V8_TARGET_ARCH_PPC64
2545  scratch,
2546 #endif
2547  result_reg, d0);
2548 
2549 // Test for overflow
2550 #if V8_TARGET_ARCH_PPC64
2551  __ TestIfInt32(result_reg, r0);
2552 #else
2553  __ TestIfInt32(scratch, result_reg, r0);
2554 #endif
2555  __ beq(&fastpath_done);
2556 
2557  __ Push(scratch_high, scratch_low);
2558  // Account for saved regs.
2559  argument_offset += 2 * kPointerSize;
2560 
2561  __ lwz(scratch_high,
2562  MemOperand(sp, argument_offset + Register::kExponentOffset));
2563  __ lwz(scratch_low,
2564  MemOperand(sp, argument_offset + Register::kMantissaOffset));
2565 
2566  __ ExtractBitMask(scratch, scratch_high, HeapNumber::kExponentMask);
2567  // Load scratch with exponent - 1. This is faster than loading
2568  // with exponent because Bias + 1 = 1024 which is a *PPC* immediate value.
2569  STATIC_ASSERT(HeapNumber::kExponentBias + 1 == 1024);
2570  __ subi(scratch, scratch, Operand(HeapNumber::kExponentBias + 1));
 2571  // If the exponent is greater than or equal to 84, the 32 least significant
2572  // bits are 0s (2^84 = 1, 52 significant bits, 32 uncoded bits),
2573  // the result is 0.
2574  // Compare exponent with 84 (compare exponent - 1 with 83).
2575  __ cmpi(scratch, Operand(83));
2576  __ bge(&out_of_range);
2577 
2578  // If we reach this code, 31 <= exponent <= 83.
2579  // So, we don't have to handle cases where 0 <= exponent <= 20 for
2580  // which we would need to shift right the high part of the mantissa.
2581  // Scratch contains exponent - 1.
2582  // Load scratch with 52 - exponent (load with 51 - (exponent - 1)).
2583  __ subfic(scratch, scratch, Operand(51));
2584  __ cmpi(scratch, Operand::Zero());
2585  __ ble(&only_low);
2586  // 21 <= exponent <= 51, shift scratch_low and scratch_high
2587  // to generate the result.
2588  __ srw(scratch_low, scratch_low, scratch);
2589  // Scratch contains: 52 - exponent.
 2590  // We need: exponent - 20.
2591  // So we use: 32 - scratch = 32 - 52 + exponent = exponent - 20.
2592  __ subfic(scratch, scratch, Operand(32));
2593  __ ExtractBitMask(result_reg, scratch_high, HeapNumber::kMantissaMask);
2594  // Set the implicit 1 before the mantissa part in scratch_high.
2595  STATIC_ASSERT(HeapNumber::kMantissaBitsInTopWord >= 16);
2596  __ oris(result_reg, result_reg,
2597  Operand(1 << ((HeapNumber::kMantissaBitsInTopWord)-16)));
2598  __ slw(r0, result_reg, scratch);
2599  __ orx(result_reg, scratch_low, r0);
2600  __ b(&negate);
2601 
2602  __ bind(&out_of_range);
2603  __ mov(result_reg, Operand::Zero());
2604  __ b(&done);
2605 
2606  __ bind(&only_low);
2607  // 52 <= exponent <= 83, shift only scratch_low.
2608  // On entry, scratch contains: 52 - exponent.
2609  __ neg(scratch, scratch);
2610  __ slw(result_reg, scratch_low, scratch);
2611 
2612  __ bind(&negate);
2613  // If input was positive, scratch_high ASR 31 equals 0 and
2614  // scratch_high LSR 31 equals zero.
2615  // New result = (result eor 0) + 0 = result.
2616  // If the input was negative, we have to negate the result.
 2617  // scratch_high ASR 31 equals 0xFFFFFFFF and scratch_high LSR 31 equals 1.
2618  // New result = (result eor 0xFFFFFFFF) + 1 = 0 - result.
2619  __ srawi(r0, scratch_high, 31);
2620 #if V8_TARGET_ARCH_PPC64
2621  __ srdi(r0, r0, Operand(32));
2622 #endif
2623  __ xor_(result_reg, result_reg, r0);
2624  __ srwi(r0, scratch_high, Operand(31));
2625  __ add(result_reg, result_reg, r0);
2626 
2627  __ bind(&done);
2628  __ Pop(scratch_high, scratch_low);
2629  // Account for saved regs.
2630  argument_offset -= 2 * kPointerSize;
2631 
2632  __ bind(&fastpath_done);
2633  __ StoreP(result_reg, MemOperand(sp, argument_offset));
2634  __ Pop(result_reg, scratch);
2635 
2636  __ Ret();
2637 }
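// [Illustrative sketch, not generated code] The slow path above reproduces,
// with integer arithmetic on the exponent and mantissa words, the usual
// "truncate toward zero, then wrap modulo 2^32" conversion. A portable C++
// expression of the same result (assuming a finite input and the conventional
// two's-complement narrowing; needs <cmath> and <cstdint> if extracted) is:
//
//   int32_t DoubleToInt32(double x) {
//     double t = std::trunc(x);                // truncate toward zero
//     double m = std::fmod(t, 4294967296.0);   // reduce modulo 2^32 (keeps sign)
//     if (m < 0) m += 4294967296.0;            // map into [0, 2^32)
//     return static_cast<int32_t>(static_cast<uint32_t>(m));
//   }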
2638 
2639 void Builtins::Generate_MathPowInternal(MacroAssembler* masm) {
2640  const Register exponent = r5;
2641  const DoubleRegister double_base = d1;
2642  const DoubleRegister double_exponent = d2;
2643  const DoubleRegister double_result = d3;
2644  const DoubleRegister double_scratch = d0;
2645  const Register scratch = r11;
2646  const Register scratch2 = r10;
2647 
2648  Label call_runtime, done, int_exponent;
2649 
2650  // Detect integer exponents stored as double.
2651  __ TryDoubleToInt32Exact(scratch, double_exponent, scratch2, double_scratch);
2652  __ beq(&int_exponent);
2653 
2654  __ mflr(r0);
2655  __ push(r0);
2656  {
2657  AllowExternalCallThatCantCauseGC scope(masm);
2658  __ PrepareCallCFunction(0, 2, scratch);
2659  __ MovToFloatParameters(double_base, double_exponent);
2660  __ CallCFunction(ExternalReference::power_double_double_function(), 0, 2);
2661  }
2662  __ pop(r0);
2663  __ mtlr(r0);
2664  __ MovFromFloatResult(double_result);
2665  __ b(&done);
2666 
2667  // Calculate power with integer exponent.
2668  __ bind(&int_exponent);
2669 
2670  // Get two copies of exponent in the registers scratch and exponent.
 2671  // Exponent has previously been stored into scratch as an untagged integer.
2672  __ mr(exponent, scratch);
2673 
2674  __ fmr(double_scratch, double_base); // Back up base.
2675  __ li(scratch2, Operand(1));
2676  __ ConvertIntToDouble(scratch2, double_result);
2677 
2678  // Get absolute value of exponent.
2679  __ cmpi(scratch, Operand::Zero());
2680  if (CpuFeatures::IsSupported(ISELECT)) {
2681  __ neg(scratch2, scratch);
2682  __ isel(lt, scratch, scratch2, scratch);
2683  } else {
2684  Label positive_exponent;
2685  __ bge(&positive_exponent);
2686  __ neg(scratch, scratch);
2687  __ bind(&positive_exponent);
2688  }
2689 
2690  Label while_true, no_carry, loop_end;
2691  __ bind(&while_true);
2692  __ andi(scratch2, scratch, Operand(1));
2693  __ beq(&no_carry, cr0);
2694  __ fmul(double_result, double_result, double_scratch);
2695  __ bind(&no_carry);
2696  __ ShiftRightImm(scratch, scratch, Operand(1), SetRC);
2697  __ beq(&loop_end, cr0);
2698  __ fmul(double_scratch, double_scratch, double_scratch);
2699  __ b(&while_true);
2700  __ bind(&loop_end);
2701 
2702  __ cmpi(exponent, Operand::Zero());
2703  __ bge(&done);
2704 
2705  __ li(scratch2, Operand(1));
2706  __ ConvertIntToDouble(scratch2, double_scratch);
2707  __ fdiv(double_result, double_scratch, double_result);
2708  // Test whether result is zero. Bail out to check for subnormal result.
2709  // Due to subnormals, x^-y == (1/x)^y does not hold in all cases.
2710  __ fcmpu(double_result, kDoubleRegZero);
2711  __ bne(&done);
 2712  // double_exponent may not contain the exponent value if the input was a
 2713  // Smi. We set it to the exponent value before bailing out.
2714  __ ConvertIntToDouble(exponent, double_exponent);
2715 
2716  // Returning or bailing out.
2717  __ mflr(r0);
2718  __ push(r0);
2719  {
2720  AllowExternalCallThatCantCauseGC scope(masm);
2721  __ PrepareCallCFunction(0, 2, scratch);
2722  __ MovToFloatParameters(double_base, double_exponent);
2723  __ CallCFunction(ExternalReference::power_double_double_function(), 0, 2);
2724  }
2725  __ pop(r0);
2726  __ mtlr(r0);
2727  __ MovFromFloatResult(double_result);
2728 
2729  __ bind(&done);
2730  __ Ret();
2731 }
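// [Illustrative sketch, not generated code] The integer-exponent path above is
// exponentiation by squaring. A minimal C++ equivalent of that loop, assuming
// a plain double base and int exponent (the runtime bailout for subnormal
// results is omitted here):
//
//   double PowIntExponent(double base, int exponent) {
//     double result = 1.0;
//     double scratch = base;
//     unsigned bits = exponent < 0 ? 0u - static_cast<unsigned>(exponent)
//                                  : static_cast<unsigned>(exponent);
//     while (bits != 0) {
//       if (bits & 1) result *= scratch;     // odd bit: multiply the result in
//       bits >>= 1;
//       if (bits != 0) scratch *= scratch;   // square the running base
//     }
//     return exponent < 0 ? 1.0 / result : result;
//   }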
2732 
2733 namespace {
2734 
2735 void GenerateInternalArrayConstructorCase(MacroAssembler* masm,
2736  ElementsKind kind) {
2737  // Load undefined into the allocation site parameter as required by
2738  // ArrayNArgumentsConstructor.
2739  __ LoadRoot(kJavaScriptCallExtraArg1Register, RootIndex::kUndefinedValue);
2740 
2741  __ cmpli(r3, Operand(1));
2742 
2743  __ Jump(CodeFactory::InternalArrayNoArgumentConstructor(masm->isolate(), kind)
2744  .code(),
2745  RelocInfo::CODE_TARGET, lt);
2746 
2747  __ Jump(BUILTIN_CODE(masm->isolate(), ArrayNArgumentsConstructor),
2748  RelocInfo::CODE_TARGET, gt);
2749 
2750  if (IsFastPackedElementsKind(kind)) {
 2751  // We might need to create a holey array, so
 2752  // look at the first argument.
2753  __ LoadP(r6, MemOperand(sp, 0));
2754  __ cmpi(r6, Operand::Zero());
2755 
2756  __ Jump(CodeFactory::InternalArraySingleArgumentConstructor(
2757  masm->isolate(), GetHoleyElementsKind(kind))
2758  .code(),
2759  RelocInfo::CODE_TARGET, ne);
2760  }
2761 
2762  __ Jump(
2763  CodeFactory::InternalArraySingleArgumentConstructor(masm->isolate(), kind)
2764  .code(),
2765  RelocInfo::CODE_TARGET);
2766 }
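// [Illustrative sketch, not generated code] The dispatch performed above,
// written as ordinary control flow (the enum below is only for illustration):
//
//   enum class Ctor { kNoArgument, kSingleArgument, kSingleArgumentHoley,
//                     kNArguments };
//   Ctor SelectInternalArrayCtor(int argc, intptr_t first_arg,
//                                bool packed_kind) {
//     if (argc < 1) return Ctor::kNoArgument;
//     if (argc > 1) return Ctor::kNArguments;
//     // Exactly one argument: a non-zero length on a packed kind needs the
//     // holey variant, since the elements are not filled in.
//     return (packed_kind && first_arg != 0) ? Ctor::kSingleArgumentHoley
//                                            : Ctor::kSingleArgument;
//   }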
2767 
2768 } // namespace
2769 
2770 void Builtins::Generate_InternalArrayConstructorImpl(MacroAssembler* masm) {
2771  // ----------- S t a t e -------------
2772  // -- r3 : argc
2773  // -- r4 : constructor
2774  // -- sp[0] : return address
2775  // -- sp[4] : last argument
2776  // -----------------------------------
2777 
2778  if (FLAG_debug_code) {
2779  // The array construct code is only set for the global and natives
 2780  // builtin Array functions, which always have maps.
2781 
2782  // Initial map for the builtin Array function should be a map.
2783  __ LoadP(r6, FieldMemOperand(r4, JSFunction::kPrototypeOrInitialMapOffset));
 2784  // This test will detect both a nullptr and a Smi.
2785  __ TestIfSmi(r6, r0);
2786  __ Assert(ne, AbortReason::kUnexpectedInitialMapForArrayFunction, cr0);
2787  __ CompareObjectType(r6, r6, r7, MAP_TYPE);
2788  __ Assert(eq, AbortReason::kUnexpectedInitialMapForArrayFunction);
2789  }
2790 
2791  // Figure out the right elements kind
2792  __ LoadP(r6, FieldMemOperand(r4, JSFunction::kPrototypeOrInitialMapOffset));
2793  // Load the map's "bit field 2" into |result|.
2794  __ lbz(r6, FieldMemOperand(r6, Map::kBitField2Offset));
2795  // Retrieve elements_kind from bit field 2.
2796  __ DecodeField<Map::ElementsKindBits>(r6);
2797 
2798  if (FLAG_debug_code) {
2799  Label done;
2800  __ cmpi(r6, Operand(PACKED_ELEMENTS));
2801  __ beq(&done);
2802  __ cmpi(r6, Operand(HOLEY_ELEMENTS));
2803  __ Assert(
2804  eq,
2805  AbortReason::kInvalidElementsKindForInternalArrayOrInternalPackedArray);
2806  __ bind(&done);
2807  }
2808 
2809  Label fast_elements_case;
2810  __ cmpi(r6, Operand(PACKED_ELEMENTS));
2811  __ beq(&fast_elements_case);
2812  GenerateInternalArrayConstructorCase(masm, HOLEY_ELEMENTS);
2813 
2814  __ bind(&fast_elements_case);
2815  GenerateInternalArrayConstructorCase(masm, PACKED_ELEMENTS);
2816 }
2817 
2818 #undef __
2819 } // namespace internal
2820 } // namespace v8
2821 
2822 #endif // V8_TARGET_ARCH_PPC