V8 API Reference, 7.2.502.16 (for Deno 0.2.4)
builtins-arm.cc
1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
4 
5 #if V8_TARGET_ARCH_ARM
6 
7 #include "src/assembler-inl.h"
8 #include "src/code-factory.h"
9 #include "src/code-stubs.h"
10 #include "src/counters.h"
11 #include "src/debug/debug.h"
12 #include "src/deoptimizer.h"
13 #include "src/frame-constants.h"
14 #include "src/frames.h"
15 #include "src/objects-inl.h"
16 #include "src/objects/js-generator.h"
17 #include "src/objects/smi.h"
18 #include "src/register-configuration.h"
19 #include "src/runtime/runtime.h"
20 #include "src/wasm/wasm-objects.h"
21 
22 namespace v8 {
23 namespace internal {
24 
25 #define __ ACCESS_MASM(masm)
26 
27 void Builtins::Generate_Adaptor(MacroAssembler* masm, Address address,
28  ExitFrameType exit_frame_type) {
29 #if defined(__thumb__)
30  // Thumb mode builtin.
31  DCHECK_EQ(1, reinterpret_cast<uintptr_t>(
32  ExternalReference::Create(address).address()) &
33  1);
34 #endif
35  __ Move(kJavaScriptCallExtraArg1Register, ExternalReference::Create(address));
36  if (exit_frame_type == BUILTIN_EXIT) {
37  __ Jump(BUILTIN_CODE(masm->isolate(), AdaptorWithBuiltinExitFrame),
38  RelocInfo::CODE_TARGET);
39  } else {
40  DCHECK(exit_frame_type == EXIT);
41  __ Jump(BUILTIN_CODE(masm->isolate(), AdaptorWithExitFrame),
42  RelocInfo::CODE_TARGET);
43  }
44 }
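// Note on the Thumb DCHECK above: per the ARM/Thumb interworking
// convention, a Thumb entry point has bit 0 of its address set, which is
// what the assertion verifies. The adaptor itself only stashes the C++
// builtin's address in kJavaScriptCallExtraArg1Register and tail-calls a
// generic adaptor that builds the requested exit frame before entering C++.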
45 
46 void Builtins::Generate_InternalArrayConstructor(MacroAssembler* masm) {
47  // ----------- S t a t e -------------
48  // -- r0 : number of arguments
49  // -- lr : return address
50  // -- sp[...]: constructor arguments
51  // -----------------------------------
52  Label generic_array_code, one_or_more_arguments, two_or_more_arguments;
53 
54  if (FLAG_debug_code) {
55  // The initial map for the builtin InternalArray function should be a map.
56  __ ldr(r2, FieldMemOperand(r1, JSFunction::kPrototypeOrInitialMapOffset));
57  __ SmiTst(r2);
58  __ Assert(ne, AbortReason::kUnexpectedInitialMapForInternalArrayFunction);
59  __ CompareObjectType(r2, r3, r4, MAP_TYPE);
60  __ Assert(eq, AbortReason::kUnexpectedInitialMapForInternalArrayFunction);
61  }
62 
63  // Run the native code for the InternalArray function called as a normal
64  // function.
65  __ Jump(BUILTIN_CODE(masm->isolate(), InternalArrayConstructorImpl),
66  RelocInfo::CODE_TARGET);
67 }
68 
69 static void GenerateTailCallToReturnedCode(MacroAssembler* masm,
70  Runtime::FunctionId function_id) {
71  // ----------- S t a t e -------------
72  // -- r0 : argument count (preserved for callee)
73  // -- r1 : target function (preserved for callee)
74  // -- r3 : new target (preserved for callee)
75  // -----------------------------------
76  {
77  FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
78  // Push the number of arguments to the callee.
79  __ SmiTag(r0);
80  __ push(r0);
81  // Push a copy of the target function and the new target.
82  __ push(r1);
83  __ push(r3);
84  // Push function as parameter to the runtime call.
85  __ Push(r1);
86 
87  __ CallRuntime(function_id, 1);
88  __ mov(r2, r0);
89 
90  // Restore target function and new target.
91  __ pop(r3);
92  __ pop(r1);
93  __ pop(r0);
94  __ SmiUntag(r0, r0);
95  }
96  static_assert(kJavaScriptCallCodeStartRegister == r2, "ABI mismatch");
97  __ add(r2, r2, Operand(Code::kHeaderSize - kHeapObjectTag));
98  __ Jump(r2);
99 }
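// A sketch of the entry-point arithmetic used above and throughout this
// file (assuming the usual V8 constants, where kHeapObjectTag == 1): a
// Code pointer is tagged, so the first instruction lives at
//   code + Code::kHeaderSize - kHeapObjectTag
// which skips the Code object header and strips the tag bit in one add.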
100 
101 namespace {
102 
103 void Generate_JSBuiltinsConstructStubHelper(MacroAssembler* masm) {
104  // ----------- S t a t e -------------
105  // -- r0 : number of arguments
106  // -- r1 : constructor function
107  // -- r3 : new target
108  // -- cp : context
109  // -- lr : return address
110  // -- sp[...]: constructor arguments
111  // -----------------------------------
112 
113  Register scratch = r2;
114 
115  // Enter a construct frame.
116  {
117  FrameAndConstantPoolScope scope(masm, StackFrame::CONSTRUCT);
118 
119  // Preserve the incoming parameters on the stack.
120  __ SmiTag(r0);
121  __ Push(cp, r0);
122  __ SmiUntag(r0);
123 
124  // The receiver for the builtin/api call.
125  __ PushRoot(RootIndex::kTheHoleValue);
126 
127  // Set up pointer to last argument.
128  __ add(r4, fp, Operand(StandardFrameConstants::kCallerSPOffset));
129 
130  // Copy arguments and receiver to the expression stack.
131  Label loop, entry;
132  __ mov(r5, r0);
133  // ----------- S t a t e -------------
134  // -- r0: number of arguments (untagged)
135  // -- r1: constructor function
136  // -- r3: new target
137  // -- r4: pointer to last argument
138  // -- r5: counter
139  // -- sp[0*kPointerSize]: the hole (receiver)
140  // -- sp[1*kPointerSize]: number of arguments (tagged)
141  // -- sp[2*kPointerSize]: context
142  // -----------------------------------
143  __ b(&entry);
144  __ bind(&loop);
145  __ ldr(scratch, MemOperand(r4, r5, LSL, kPointerSizeLog2));
146  __ push(scratch);
147  __ bind(&entry);
148  __ sub(r5, r5, Operand(1), SetCC);
149  __ b(ge, &loop);
150 
151  // Call the function.
152  // r0: number of arguments (untagged)
153  // r1: constructor function
154  // r3: new target
155  ParameterCount actual(r0);
156  __ InvokeFunction(r1, r3, actual, CALL_FUNCTION);
157 
158  // Restore context from the frame.
159  __ ldr(cp, MemOperand(fp, ConstructFrameConstants::kContextOffset));
160  // Restore smi-tagged arguments count from the frame.
161  __ ldr(scratch, MemOperand(fp, ConstructFrameConstants::kLengthOffset));
162  // Leave construct frame.
163  }
164 
165  // Remove caller arguments from the stack and return.
166  STATIC_ASSERT(kSmiTagSize == 1 && kSmiTag == 0);
167  __ add(sp, sp, Operand(scratch, LSL, kPointerSizeLog2 - kSmiTagSize));
168  __ add(sp, sp, Operand(kPointerSize));
169  __ Jump(lr);
170 }
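// The final stack adjustment exploits Smi tagging (guarded by the
// STATIC_ASSERT): the count saved on the frame already holds
// argc << kSmiTagSize, so shifting it left by kPointerSizeLog2 - kSmiTagSize
// (2 - 1 = 1 on 32-bit ARM) scales argc straight to bytes. E.g. argc == 3
// is stored as 6, and 6 << 1 == 12 == 3 * kPointerSize; the extra
// kPointerSize then drops the receiver.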
171 
172 void Generate_StackOverflowCheck(MacroAssembler* masm, Register num_args,
173  Register scratch, Label* stack_overflow) {
174  // Check the stack for overflow. We are not trying to catch
175  // interruptions (e.g. debug break and preemption) here, so the "real stack
176  // limit" is checked.
177  __ LoadRoot(scratch, RootIndex::kRealStackLimit);
178  // Make scratch the space we have left. The stack might already be overflowed
179  // here, which will cause scratch to become negative.
180  __ sub(scratch, sp, scratch);
181  // Check if the arguments will overflow the stack.
182  __ cmp(scratch, Operand(num_args, LSL, kPointerSizeLog2));
183  __ b(le, stack_overflow); // Signed comparison.
184 }
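// In other words: scratch = sp - real_stack_limit is the headroom in
// bytes (negative if the stack is already exhausted), and it is compared
// against num_args * kPointerSize. The comparison is signed precisely so
// that negative headroom also branches to stack_overflow.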
185 
186 } // namespace
187 
188 // The construct stub for ES5 constructor functions and ES6 class constructors.
189 void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) {
190  // ----------- S t a t e -------------
191  // -- r0: number of arguments (untagged)
192  // -- r1: constructor function
193  // -- r3: new target
194  // -- cp: context
195  // -- lr: return address
196  // -- sp[...]: constructor arguments
197  // -----------------------------------
198 
199  // Enter a construct frame.
200  {
201  FrameAndConstantPoolScope scope(masm, StackFrame::CONSTRUCT);
202  Label post_instantiation_deopt_entry, not_create_implicit_receiver;
203 
204  // Preserve the incoming parameters on the stack.
205  __ LoadRoot(r4, RootIndex::kTheHoleValue);
206  __ SmiTag(r0);
207  __ Push(cp, r0, r1, r4, r3);
208 
209  // ----------- S t a t e -------------
210  // -- sp[0*kPointerSize]: new target
211  // -- sp[1*kPointerSize]: padding
212  // -- r1 and sp[2*kPointerSize]: constructor function
213  // -- sp[3*kPointerSize]: number of arguments (tagged)
214  // -- sp[4*kPointerSize]: context
215  // -----------------------------------
216 
217  __ ldr(r4, FieldMemOperand(r1, JSFunction::kSharedFunctionInfoOffset));
218  __ ldr(r4, FieldMemOperand(r4, SharedFunctionInfo::kFlagsOffset));
219  __ tst(r4, Operand(SharedFunctionInfo::IsDerivedConstructorBit::kMask));
220  __ b(ne, &not_create_implicit_receiver);
221 
222  // If not derived class constructor: Allocate the new receiver object.
223  __ IncrementCounter(masm->isolate()->counters()->constructed_objects(), 1,
224  r4, r5);
225  __ Call(BUILTIN_CODE(masm->isolate(), FastNewObject),
226  RelocInfo::CODE_TARGET);
227  __ b(&post_instantiation_deopt_entry);
228 
229  // Else: use TheHoleValue as receiver for constructor call
230  __ bind(&not_create_implicit_receiver);
231  __ LoadRoot(r0, RootIndex::kTheHoleValue);
232 
233  // ----------- S t a t e -------------
234  // -- r0: receiver
235  // -- Slot 3 / sp[0*kPointerSize]: new target
236  // -- Slot 2 / sp[1*kPointerSize]: constructor function
237  // -- Slot 1 / sp[2*kPointerSize]: number of arguments (tagged)
238  // -- Slot 0 / sp[3*kPointerSize]: context
239  // -----------------------------------
240  // Deoptimizer enters here.
241  masm->isolate()->heap()->SetConstructStubCreateDeoptPCOffset(
242  masm->pc_offset());
243  __ bind(&post_instantiation_deopt_entry);
244 
245  // Restore new target.
246  __ Pop(r3);
247  // Push the allocated receiver to the stack. We need two copies
248  // because we may have to return the original one and the calling
249  // conventions dictate that the called function pops the receiver.
250  __ Push(r0, r0);
251 
252  // ----------- S t a t e -------------
253  // -- r3: new target
254  // -- sp[0*kPointerSize]: implicit receiver
255  // -- sp[1*kPointerSize]: implicit receiver
256  // -- sp[2*kPointerSize]: padding
257  // -- sp[3*kPointerSize]: constructor function
258  // -- sp[4*kPointerSize]: number of arguments (tagged)
259  // -- sp[5*kPointerSize]: context
260  // -----------------------------------
261 
262  // Restore constructor function and argument count.
263  __ ldr(r1, MemOperand(fp, ConstructFrameConstants::kConstructorOffset));
264  __ ldr(r0, MemOperand(fp, ConstructFrameConstants::kLengthOffset));
265  __ SmiUntag(r0);
266 
267  // Set up pointer to last argument.
268  __ add(r4, fp, Operand(StandardFrameConstants::kCallerSPOffset));
269 
270  Label enough_stack_space, stack_overflow;
271  Generate_StackOverflowCheck(masm, r0, r5, &stack_overflow);
272  __ b(&enough_stack_space);
273 
274  __ bind(&stack_overflow);
275  // Restore the context from the frame.
276  __ ldr(cp, MemOperand(fp, ConstructFrameConstants::kContextOffset));
277  __ CallRuntime(Runtime::kThrowStackOverflow);
278  // Unreachable code.
279  __ bkpt(0);
280 
281  __ bind(&enough_stack_space);
282 
283  // Copy arguments and receiver to the expression stack.
284  Label loop, entry;
285  __ mov(r5, r0);
286  // ----------- S t a t e -------------
287  // -- r0: number of arguments (untagged)
288  // -- r3: new target
289  // -- r4: pointer to last argument
290  // -- r5: counter
291  // -- sp[0*kPointerSize]: implicit receiver
292  // -- sp[1*kPointerSize]: implicit receiver
293  // -- sp[2*kPointerSize]: padding
294  // -- r1 and sp[3*kPointerSize]: constructor function
295  // -- sp[4*kPointerSize]: number of arguments (tagged)
296  // -- sp[5*kPointerSize]: context
297  // -----------------------------------
298  __ b(&entry);
299 
300  __ bind(&loop);
301  __ ldr(r6, MemOperand(r4, r5, LSL, kPointerSizeLog2));
302  __ push(r6);
303  __ bind(&entry);
304  __ sub(r5, r5, Operand(1), SetCC);
305  __ b(ge, &loop);
306 
307  // Call the function.
308  ParameterCount actual(r0);
309  __ InvokeFunction(r1, r3, actual, CALL_FUNCTION);
310 
311  // ----------- S t a t e -------------
312  // -- r0: constructor result
313  // -- sp[0*kPointerSize]: implicit receiver
314  // -- sp[1*kPointerSize]: padding
315  // -- sp[2*kPointerSize]: constructor function
316  // -- sp[3*kPointerSize]: number of arguments
317  // -- sp[4*kPointerSize]: context
318  // -----------------------------------
319 
320  // Store offset of return address for deoptimizer.
321  masm->isolate()->heap()->SetConstructStubInvokeDeoptPCOffset(
322  masm->pc_offset());
323 
324  // Restore the context from the frame.
325  __ ldr(cp, MemOperand(fp, ConstructFrameConstants::kContextOffset));
326 
327  // If the result is an object (in the ECMA sense), we should get rid
328  // of the receiver and use the result; see ECMA-262 section 13.2.2-7
329  // on page 74.
330  Label use_receiver, do_throw, leave_frame;
331 
332  // If the result is undefined, we jump out to using the implicit receiver.
333  __ JumpIfRoot(r0, RootIndex::kUndefinedValue, &use_receiver);
334 
335  // Otherwise we do a smi check and fall through to check if the return value
336  // is a valid receiver.
337 
338  // If the result is a smi, it is *not* an object in the ECMA sense.
339  __ JumpIfSmi(r0, &use_receiver);
340 
341  // If the type of the result (stored in its map) is less than
342  // FIRST_JS_RECEIVER_TYPE, it is not an object in the ECMA sense.
343  STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
344  __ CompareObjectType(r0, r4, r5, FIRST_JS_RECEIVER_TYPE);
345  __ b(ge, &leave_frame);
346  __ b(&use_receiver);
347 
348  __ bind(&do_throw);
349  __ CallRuntime(Runtime::kThrowConstructorReturnedNonObject);
350 
351  // Throw away the result of the constructor invocation and use the
352  // on-stack receiver as the result.
353  __ bind(&use_receiver);
354  __ ldr(r0, MemOperand(sp, 0 * kPointerSize));
355  __ JumpIfRoot(r0, RootIndex::kTheHoleValue, &do_throw);
356 
357  __ bind(&leave_frame);
358  // Restore smi-tagged arguments count from the frame.
359  __ ldr(r1, MemOperand(fp, ConstructFrameConstants::kLengthOffset));
360  // Leave construct frame.
361  }
362  // Remove caller arguments from the stack and return.
363  STATIC_ASSERT(kSmiTagSize == 1 && kSmiTag == 0);
364  __ add(sp, sp, Operand(r1, LSL, kPointerSizeLog2 - kSmiTagSize));
365  __ add(sp, sp, Operand(kPointerSize));
366  __ Jump(lr);
367 }
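// Receiver rules encoded above: a derived-class constructor gets the hole
// instead of a freshly allocated implicit receiver, and a construct call's
// result replaces the receiver only when it is a JSReceiver. Anything else
// falls back to the on-stack receiver, which in turn triggers
// kThrowConstructorReturnedNonObject if it is still the hole.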
368 
369 void Builtins::Generate_JSBuiltinsConstructStub(MacroAssembler* masm) {
370  Generate_JSBuiltinsConstructStubHelper(masm);
371 }
372 
373 static void GetSharedFunctionInfoBytecode(MacroAssembler* masm,
374  Register sfi_data,
375  Register scratch1) {
376  Label done;
377 
378  __ CompareObjectType(sfi_data, scratch1, scratch1, INTERPRETER_DATA_TYPE);
379  __ b(ne, &done);
380  __ ldr(sfi_data,
381  FieldMemOperand(sfi_data, InterpreterData::kBytecodeArrayOffset));
382 
383  __ bind(&done);
384 }
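// The indirection here: SharedFunctionInfo::function_data holds either a
// BytecodeArray directly or an InterpreterData (used when the function
// carries its own copy of the interpreter entry trampoline, e.g. for
// profiling; see Generate_InterpreterEnterBytecode below), in which case
// the BytecodeArray sits one field deeper.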
385 
386 // static
387 void Builtins::Generate_ResumeGeneratorTrampoline(MacroAssembler* masm) {
388  // ----------- S t a t e -------------
389  // -- r0 : the value to pass to the generator
390  // -- r1 : the JSGeneratorObject to resume
391  // -- lr : return address
392  // -----------------------------------
393  __ AssertGeneratorObject(r1);
394 
395  // Store input value into generator object.
396  __ str(r0, FieldMemOperand(r1, JSGeneratorObject::kInputOrDebugPosOffset));
397  __ RecordWriteField(r1, JSGeneratorObject::kInputOrDebugPosOffset, r0, r3,
398  kLRHasNotBeenSaved, kDontSaveFPRegs);
399 
400  // Load suspended function and context.
401  __ ldr(r4, FieldMemOperand(r1, JSGeneratorObject::kFunctionOffset));
402  __ ldr(cp, FieldMemOperand(r4, JSFunction::kContextOffset));
403 
404  Label prepare_step_in_if_stepping, prepare_step_in_suspended_generator;
405  Label stepping_prepared;
406  Register scratch = r5;
407 
408  // Flood function if we are stepping.
409  ExternalReference debug_hook =
410  ExternalReference::debug_hook_on_function_call_address(masm->isolate());
411  __ Move(scratch, debug_hook);
412  __ ldrsb(scratch, MemOperand(scratch));
413  __ cmp(scratch, Operand(0));
414  __ b(ne, &prepare_step_in_if_stepping);
415 
416  // Flood function if we need to continue stepping in the suspended
417  // generator.
418  ExternalReference debug_suspended_generator =
419  ExternalReference::debug_suspended_generator_address(masm->isolate());
420  __ Move(scratch, debug_suspended_generator);
421  __ ldr(scratch, MemOperand(scratch));
422  __ cmp(scratch, Operand(r1));
423  __ b(eq, &prepare_step_in_suspended_generator);
424  __ bind(&stepping_prepared);
425 
426  // Check the stack for overflow. We are not trying to catch interruptions
427  // (i.e. debug break and preemption) here, so check the "real stack limit".
428  Label stack_overflow;
429  __ CompareRoot(sp, RootIndex::kRealStackLimit);
430  __ b(lo, &stack_overflow);
431 
432  // Push receiver.
433  __ ldr(scratch, FieldMemOperand(r1, JSGeneratorObject::kReceiverOffset));
434  __ Push(scratch);
435 
436  // ----------- S t a t e -------------
437  // -- r1 : the JSGeneratorObject to resume
438  // -- r4 : generator function
439  // -- cp : generator context
440  // -- lr : return address
441  // -- sp[0] : generator receiver
442  // -----------------------------------
443 
444  // Copy the function arguments from the generator object's register file.
445  __ ldr(r3, FieldMemOperand(r4, JSFunction::kSharedFunctionInfoOffset));
446  __ ldrh(r3,
447  FieldMemOperand(r3, SharedFunctionInfo::kFormalParameterCountOffset));
448  __ ldr(r2,
449  FieldMemOperand(r1, JSGeneratorObject::kParametersAndRegistersOffset));
450  {
451  Label done_loop, loop;
452  __ mov(r6, Operand(0));
453 
454  __ bind(&loop);
455  __ cmp(r6, r3);
456  __ b(ge, &done_loop);
457  __ add(scratch, r2, Operand(r6, LSL, kPointerSizeLog2));
458  __ ldr(scratch, FieldMemOperand(scratch, FixedArray::kHeaderSize));
459  __ Push(scratch);
460  __ add(r6, r6, Operand(1));
461  __ b(&loop);
462 
463  __ bind(&done_loop);
464  }
465 
466  // Underlying function needs to have bytecode available.
467  if (FLAG_debug_code) {
468  __ ldr(r3, FieldMemOperand(r4, JSFunction::kSharedFunctionInfoOffset));
469  __ ldr(r3, FieldMemOperand(r3, SharedFunctionInfo::kFunctionDataOffset));
470  GetSharedFunctionInfoBytecode(masm, r3, r0);
471  __ CompareObjectType(r3, r3, r3, BYTECODE_ARRAY_TYPE);
472  __ Assert(eq, AbortReason::kMissingBytecodeArray);
473  }
474 
475  // Resume (Ignition/TurboFan) generator object.
476  {
477  __ ldr(r0, FieldMemOperand(r4, JSFunction::kSharedFunctionInfoOffset));
478  __ ldrh(r0, FieldMemOperand(
479  r0, SharedFunctionInfo::kFormalParameterCountOffset));
480  // We abuse new.target both to indicate that this is a resume call and to
481  // pass in the generator object. In ordinary calls, new.target is always
482  // undefined because generator functions are non-constructable.
483  __ Move(r3, r1);
484  __ Move(r1, r4);
485  static_assert(kJavaScriptCallCodeStartRegister == r2, "ABI mismatch");
486  __ ldr(r2, FieldMemOperand(r1, JSFunction::kCodeOffset));
487  __ add(r2, r2, Operand(Code::kHeaderSize - kHeapObjectTag));
488  __ Jump(r2);
489  }
490 
491  __ bind(&prepare_step_in_if_stepping);
492  {
493  FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
494  __ Push(r1, r4);
495  // Push hole as receiver since we do not use it for stepping.
496  __ PushRoot(RootIndex::kTheHoleValue);
497  __ CallRuntime(Runtime::kDebugOnFunctionCall);
498  __ Pop(r1);
499  __ ldr(r4, FieldMemOperand(r1, JSGeneratorObject::kFunctionOffset));
500  }
501  __ b(&stepping_prepared);
502 
503  __ bind(&prepare_step_in_suspended_generator);
504  {
505  FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
506  __ Push(r1);
507  __ CallRuntime(Runtime::kDebugPrepareStepInSuspendedGenerator);
508  __ Pop(r1);
509  __ ldr(r4, FieldMemOperand(r1, JSGeneratorObject::kFunctionOffset));
510  }
511  __ b(&stepping_prepared);
512 
513  __ bind(&stack_overflow);
514  {
515  FrameScope scope(masm, StackFrame::INTERNAL);
516  __ CallRuntime(Runtime::kThrowStackOverflow);
517  __ bkpt(0); // This should be unreachable.
518  }
519 }
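// Resume protocol in brief: the generator's formal parameters were saved
// into its parameters-and-registers FixedArray at suspend time and are
// re-pushed by the loop above, and the generator object itself travels in
// the new.target register (r3), letting the resumed code distinguish a
// resume from an ordinary call.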
520 
521 void Builtins::Generate_ConstructedNonConstructable(MacroAssembler* masm) {
522  FrameScope scope(masm, StackFrame::INTERNAL);
523  __ push(r1);
524  __ CallRuntime(Runtime::kThrowConstructedNonConstructable);
525 }
526 
527 static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm,
528  bool is_construct) {
529  // Called from Generate_JS_Entry
530  // r0: new.target
531  // r1: function
532  // r2: receiver
533  // r3: argc
534  // r4: argv
535  // r5-r6, r8 and cp may be clobbered
536 
537  // Enter an internal frame.
538  {
539  FrameScope scope(masm, StackFrame::INTERNAL);
540 
541  // Set up the context (we need to use the caller context from the isolate).
542  ExternalReference context_address = ExternalReference::Create(
543  IsolateAddressId::kContextAddress, masm->isolate());
544  __ Move(cp, context_address);
545  __ ldr(cp, MemOperand(cp));
546 
547  // Push the function and the receiver onto the stack.
548  __ Push(r1, r2);
549 
550  // Check if we have enough stack space to push all arguments.
551  // Clobbers r2.
552  Label enough_stack_space, stack_overflow;
553  Generate_StackOverflowCheck(masm, r3, r2, &stack_overflow);
554  __ b(&enough_stack_space);
555  __ bind(&stack_overflow);
556  __ CallRuntime(Runtime::kThrowStackOverflow);
557  // Unreachable code.
558  __ bkpt(0);
559 
560  __ bind(&enough_stack_space);
561 
562  // Remember new.target.
563  __ mov(r5, r0);
564 
565  // Copy arguments to the stack in a loop.
566  // r1: function
567  // r3: argc
568  // r4: argv, i.e. points to first arg
569  Label loop, entry;
570  __ add(r2, r4, Operand(r3, LSL, kPointerSizeLog2));
571  // r2 points past last arg.
572  __ b(&entry);
573  __ bind(&loop);
574  __ ldr(r0, MemOperand(r4, kPointerSize, PostIndex)); // read next parameter
575  __ ldr(r0, MemOperand(r0)); // dereference handle
576  __ push(r0); // push parameter
577  __ bind(&entry);
578  __ cmp(r4, r2);
579  __ b(ne, &loop);
580 
581  // Set up new.target and argc.
582  __ mov(r0, Operand(r3));
583  __ mov(r3, Operand(r5));
584 
585  // Initialize all JavaScript callee-saved registers, since they will be seen
586  // by the garbage collector as part of handlers.
587  __ LoadRoot(r4, RootIndex::kUndefinedValue);
588  __ mov(r5, Operand(r4));
589  __ mov(r6, Operand(r4));
590  __ mov(r8, Operand(r4));
591  if (kR9Available == 1) {
592  __ mov(r9, Operand(r4));
593  }
594 
595  // Invoke the code.
596  Handle<Code> builtin = is_construct
597  ? BUILTIN_CODE(masm->isolate(), Construct)
598  : masm->isolate()->builtins()->Call();
599  __ Call(builtin, RelocInfo::CODE_TARGET);
600 
601  // Exit the JS frame, remove the parameters (except the function), and
602  // return.
603  // Respect the ABI stack constraint.
604  }
605  __ Jump(lr);
606 
607  // r0: result
608 }
609 
610 void Builtins::Generate_JSEntryTrampoline(MacroAssembler* masm) {
611  Generate_JSEntryTrampolineHelper(masm, false);
612 }
613 
614 void Builtins::Generate_JSConstructEntryTrampoline(MacroAssembler* masm) {
615  Generate_JSEntryTrampolineHelper(masm, true);
616 }
617 
618 static void ReplaceClosureCodeWithOptimizedCode(
619  MacroAssembler* masm, Register optimized_code, Register closure,
620  Register scratch1, Register scratch2, Register scratch3) {
621  // Store code entry in the closure.
622  __ str(optimized_code, FieldMemOperand(closure, JSFunction::kCodeOffset));
623  __ mov(scratch1, optimized_code); // Write barrier clobbers scratch1 below.
624  __ RecordWriteField(closure, JSFunction::kCodeOffset, scratch1, scratch2,
625  kLRHasNotBeenSaved, kDontSaveFPRegs, OMIT_REMEMBERED_SET,
626  OMIT_SMI_CHECK);
627 }
628 
629 static void LeaveInterpreterFrame(MacroAssembler* masm, Register scratch) {
630  Register args_count = scratch;
631 
632  // Get the arguments + receiver count.
633  __ ldr(args_count,
634  MemOperand(fp, InterpreterFrameConstants::kBytecodeArrayFromFp));
635  __ ldr(args_count,
636  FieldMemOperand(args_count, BytecodeArray::kParameterSizeOffset));
637 
638  // Leave the frame (also dropping the register file).
639  __ LeaveFrame(StackFrame::INTERPRETED);
640 
641  // Drop receiver + arguments.
642  __ add(sp, sp, args_count, LeaveCC);
643 }
644 
645 // Tail-call |function_id| if |smi_entry| == |marker|
646 static void TailCallRuntimeIfMarkerEquals(MacroAssembler* masm,
647  Register smi_entry,
648  OptimizationMarker marker,
649  Runtime::FunctionId function_id) {
650  Label no_match;
651  __ cmp(smi_entry, Operand(Smi::FromEnum(marker)));
652  __ b(ne, &no_match);
653  GenerateTailCallToReturnedCode(masm, function_id);
654  __ bind(&no_match);
655 }
656 
657 static void MaybeTailCallOptimizedCodeSlot(MacroAssembler* masm,
658  Register feedback_vector,
659  Register scratch1, Register scratch2,
660  Register scratch3) {
661  // ----------- S t a t e -------------
662  // -- r0 : argument count (preserved for callee if needed, and caller)
663  // -- r3 : new target (preserved for callee if needed, and caller)
664  // -- r1 : target function (preserved for callee if needed, and caller)
665  // -- feedback vector (preserved for caller if needed)
666  // -----------------------------------
667  DCHECK(
668  !AreAliased(feedback_vector, r0, r1, r3, scratch1, scratch2, scratch3));
669 
670  Label optimized_code_slot_is_weak_ref, fallthrough;
671 
672  Register closure = r1;
673  Register optimized_code_entry = scratch1;
674 
675  __ ldr(
676  optimized_code_entry,
677  FieldMemOperand(feedback_vector, FeedbackVector::kOptimizedCodeOffset));
678 
679  // Check if the code entry is a Smi. If yes, we interpret it as an
680  // optimization marker. Otherwise, interpret it as a weak reference to a code
681  // object.
682  __ JumpIfNotSmi(optimized_code_entry, &optimized_code_slot_is_weak_ref);
683 
684  {
685  // Optimized code slot is a Smi optimization marker.
686 
687  // Fall through if no optimization trigger.
688  __ cmp(optimized_code_entry,
689  Operand(Smi::FromEnum(OptimizationMarker::kNone)));
690  __ b(eq, &fallthrough);
691 
692  // TODO(v8:8394): The logging of first execution will break if
693  // feedback vectors are not allocated. We need to find a different way of
694  // logging these events if required.
695  TailCallRuntimeIfMarkerEquals(masm, optimized_code_entry,
696  OptimizationMarker::kLogFirstExecution,
697  Runtime::kFunctionFirstExecution);
698  TailCallRuntimeIfMarkerEquals(masm, optimized_code_entry,
699  OptimizationMarker::kCompileOptimized,
700  Runtime::kCompileOptimized_NotConcurrent);
701  TailCallRuntimeIfMarkerEquals(
702  masm, optimized_code_entry,
703  OptimizationMarker::kCompileOptimizedConcurrent,
704  Runtime::kCompileOptimized_Concurrent);
705 
706  {
707  // Otherwise, the marker is InOptimizationQueue, so fall through hoping
708  // that an interrupt will eventually update the slot with optimized code.
709  if (FLAG_debug_code) {
710  __ cmp(
711  optimized_code_entry,
712  Operand(Smi::FromEnum(OptimizationMarker::kInOptimizationQueue)));
713  __ Assert(eq, AbortReason::kExpectedOptimizationSentinel);
714  }
715  __ jmp(&fallthrough);
716  }
717  }
718 
719  {
720  // Optimized code slot is a weak reference.
721  __ bind(&optimized_code_slot_is_weak_ref);
722 
723  __ LoadWeakValue(optimized_code_entry, optimized_code_entry, &fallthrough);
724 
725  // Check if the optimized code is marked for deopt. If it is, call the
726  // runtime to clear it.
727  Label found_deoptimized_code;
728  __ ldr(scratch2, FieldMemOperand(optimized_code_entry,
729  Code::kCodeDataContainerOffset));
730  __ ldr(
731  scratch2,
732  FieldMemOperand(scratch2, CodeDataContainer::kKindSpecificFlagsOffset));
733  __ tst(scratch2, Operand(1 << Code::kMarkedForDeoptimizationBit));
734  __ b(ne, &found_deoptimized_code);
735 
736  // The optimized code is good. Install it in the closure, link the closure
737  // into the optimized functions list, and tail call the optimized code.
738  // The feedback vector is no longer used, so reuse it as a scratch
739  // register.
740  ReplaceClosureCodeWithOptimizedCode(masm, optimized_code_entry, closure,
741  scratch2, scratch3, feedback_vector);
742  static_assert(kJavaScriptCallCodeStartRegister == r2, "ABI mismatch");
743  __ add(r2, optimized_code_entry,
744  Operand(Code::kHeaderSize - kHeapObjectTag));
745  __ Jump(r2);
746 
747  // Optimized code slot contains deoptimized code, evict it and re-enter the
748  // closure's code.
749  __ bind(&found_deoptimized_code);
750  GenerateTailCallToReturnedCode(masm, Runtime::kEvictOptimizedCodeSlot);
751  }
752 
753  // Fall-through if the optimized code cell is clear and there is no
754  // optimization marker.
755  __ bind(&fallthrough);
756 }
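// The optimized-code slot is overloaded: a Smi payload is an
// OptimizationMarker (kNone, kLogFirstExecution, kCompileOptimized,
// kCompileOptimizedConcurrent or kInOptimizationQueue), while anything
// else is a weak reference to a Code object, which LoadWeakValue follows,
// falling through if the reference was cleared by the GC.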
757 
758 // Advance the current bytecode offset. This simulates what all bytecode
759 // handlers do upon completion of the underlying operation. Will bail out to a
760 // label if the bytecode (without prefix) is a return bytecode.
761 static void AdvanceBytecodeOffsetOrReturn(MacroAssembler* masm,
762  Register bytecode_array,
763  Register bytecode_offset,
764  Register bytecode, Register scratch1,
765  Label* if_return) {
766  Register bytecode_size_table = scratch1;
767  DCHECK(!AreAliased(bytecode_array, bytecode_offset, bytecode_size_table,
768  bytecode));
769 
770  __ Move(bytecode_size_table,
771  ExternalReference::bytecode_size_table_address());
772 
773  // Check if the bytecode is a Wide or ExtraWide prefix bytecode.
774  Label process_bytecode, extra_wide;
775  STATIC_ASSERT(0 == static_cast<int>(interpreter::Bytecode::kWide));
776  STATIC_ASSERT(1 == static_cast<int>(interpreter::Bytecode::kExtraWide));
777  STATIC_ASSERT(2 == static_cast<int>(interpreter::Bytecode::kDebugBreakWide));
778  STATIC_ASSERT(3 ==
779  static_cast<int>(interpreter::Bytecode::kDebugBreakExtraWide));
780  __ cmp(bytecode, Operand(0x3));
781  __ b(hi, &process_bytecode);
782  __ tst(bytecode, Operand(0x1));
783  __ b(ne, &extra_wide);
784 
785  // Load the next bytecode and update table to the wide scaled table.
786  __ add(bytecode_offset, bytecode_offset, Operand(1));
787  __ ldrb(bytecode, MemOperand(bytecode_array, bytecode_offset));
788  __ add(bytecode_size_table, bytecode_size_table,
789  Operand(kIntSize * interpreter::Bytecodes::kBytecodeCount));
790  __ jmp(&process_bytecode);
791 
792  __ bind(&extra_wide);
793  // Load the next bytecode and update table to the extra wide scaled table.
794  __ add(bytecode_offset, bytecode_offset, Operand(1));
795  __ ldrb(bytecode, MemOperand(bytecode_array, bytecode_offset));
796  __ add(bytecode_size_table, bytecode_size_table,
797  Operand(2 * kIntSize * interpreter::Bytecodes::kBytecodeCount));
798 
799  __ bind(&process_bytecode);
800 
801 // Bailout to the return label if this is a return bytecode.
802 #define JUMP_IF_EQUAL(NAME) \
803  __ cmp(bytecode, Operand(static_cast<int>(interpreter::Bytecode::k##NAME))); \
804  __ b(if_return, eq);
805  RETURN_BYTECODE_LIST(JUMP_IF_EQUAL)
806 #undef JUMP_IF_EQUAL
807 
808  // Otherwise, load the size of the current bytecode and advance the offset.
809  __ ldr(scratch1, MemOperand(bytecode_size_table, bytecode, LSL, 2));
810  __ add(bytecode_offset, bytecode_offset, scratch1);
811 }
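// Size-table layout implied by the arithmetic above: three consecutive
// int arrays of kBytecodeCount entries each, for the single-byte, Wide
// and ExtraWide operand scales. The even prefixes (kWide, kDebugBreakWide)
// advance the table pointer by one array, the odd ones (kExtraWide,
// kDebugBreakExtraWide) by two, and the final ldr with LSL #2 indexes the
// int-sized entry for the bytecode that follows the prefix.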
812 
813 // Generate code for entering a JS function with the interpreter.
814 // On entry to the function the receiver and arguments have been pushed on the
815 // stack left to right. The actual argument count matches the formal parameter
816 // count expected by the function.
817 //
818 // The live registers are:
819 // o r1: the JS function object being called.
820 // o r3: the incoming new target or generator object
821 // o cp: our context
822 // o fp: the caller's frame pointer
823 // o sp: stack pointer
824 // o lr: return address
825 //
826 // The function builds an interpreter frame. See InterpreterFrameConstants in
827 // frames.h for its layout.
828 void Builtins::Generate_InterpreterEntryTrampoline(MacroAssembler* masm) {
829  Register closure = r1;
830  Register feedback_vector = r2;
831 
832  // Load the feedback vector from the closure.
833  __ ldr(feedback_vector,
834  FieldMemOperand(closure, JSFunction::kFeedbackCellOffset));
835  __ ldr(feedback_vector, FieldMemOperand(feedback_vector, Cell::kValueOffset));
836 
837  Label push_stack_frame;
838  // Check if feedback vector is valid. If valid, check for optimized code
839  // and update invocation count. Otherwise, setup the stack frame.
840  __ CompareRoot(feedback_vector, RootIndex::kUndefinedValue);
841  __ b(eq, &push_stack_frame);
842 
843  // Read off the optimized code slot in the feedback vector, and if there
844  // is optimized code or an optimization marker, call that instead.
845  MaybeTailCallOptimizedCodeSlot(masm, feedback_vector, r4, r6, r5);
846 
847  // Increment invocation count for the function.
848  __ ldr(r9, FieldMemOperand(feedback_vector,
849  FeedbackVector::kInvocationCountOffset));
850  __ add(r9, r9, Operand(1));
851  __ str(r9, FieldMemOperand(feedback_vector,
852  FeedbackVector::kInvocationCountOffset));
853 
854  // Open a frame scope to indicate that there is a frame on the stack. The
855  // MANUAL indicates that the scope shouldn't actually generate code to set up
856  // the frame (that is done below).
857  __ bind(&push_stack_frame);
858  FrameScope frame_scope(masm, StackFrame::MANUAL);
859  __ PushStandardFrame(closure);
860 
861  // Get the bytecode array from the function object and load it into
862  // kInterpreterBytecodeArrayRegister.
863  __ ldr(r0, FieldMemOperand(closure, JSFunction::kSharedFunctionInfoOffset));
864  __ ldr(kInterpreterBytecodeArrayRegister,
865  FieldMemOperand(r0, SharedFunctionInfo::kFunctionDataOffset));
866  GetSharedFunctionInfoBytecode(masm, kInterpreterBytecodeArrayRegister, r4);
867 
868  // Check function data field is actually a BytecodeArray object.
869  if (FLAG_debug_code) {
870  __ SmiTst(kInterpreterBytecodeArrayRegister);
871  __ Assert(
872  ne, AbortReason::kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry);
873  __ CompareObjectType(kInterpreterBytecodeArrayRegister, r0, no_reg,
874  BYTECODE_ARRAY_TYPE);
875  __ Assert(
876  eq, AbortReason::kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry);
877  }
878 
879  // Reset code age.
880  __ mov(r9, Operand(BytecodeArray::kNoAgeBytecodeAge));
881  __ strb(r9, FieldMemOperand(kInterpreterBytecodeArrayRegister,
882  BytecodeArray::kBytecodeAgeOffset));
883 
884  // Load the initial bytecode offset.
885  __ mov(kInterpreterBytecodeOffsetRegister,
886  Operand(BytecodeArray::kHeaderSize - kHeapObjectTag));
887 
888  // Push bytecode array and Smi tagged bytecode array offset.
889  __ SmiTag(r0, kInterpreterBytecodeOffsetRegister);
890  __ Push(kInterpreterBytecodeArrayRegister, r0);
891 
892  // Allocate the local and temporary register file on the stack.
893  {
894  // Load frame size from the BytecodeArray object.
895  __ ldr(r4, FieldMemOperand(kInterpreterBytecodeArrayRegister,
896  BytecodeArray::kFrameSizeOffset));
897 
898  // Do a stack check to ensure we don't go over the limit.
899  Label ok;
900  __ sub(r9, sp, Operand(r4));
901  __ LoadRoot(r2, RootIndex::kRealStackLimit);
902  __ cmp(r9, Operand(r2));
903  __ b(hs, &ok);
904  __ CallRuntime(Runtime::kThrowStackOverflow);
905  __ bind(&ok);
906 
907  // If ok, push undefined as the initial value for all register file entries.
908  Label loop_header;
909  Label loop_check;
910  __ LoadRoot(r9, RootIndex::kUndefinedValue);
911  __ b(&loop_check, al);
912  __ bind(&loop_header);
913  // TODO(rmcilroy): Consider doing more than one push per loop iteration.
914  __ push(r9);
915  // Continue loop if not done.
916  __ bind(&loop_check);
917  __ sub(r4, r4, Operand(kPointerSize), SetCC);
918  __ b(&loop_header, ge);
919  }
920 
921  // If the bytecode array has a valid incoming new target or generator object
922  // register, initialize it with the incoming value that was passed in r3.
923  __ ldr(r9, FieldMemOperand(
924  kInterpreterBytecodeArrayRegister,
925  BytecodeArray::kIncomingNewTargetOrGeneratorRegisterOffset));
926  __ cmp(r9, Operand::Zero());
927  __ str(r3, MemOperand(fp, r9, LSL, kPointerSizeLog2), ne);
928 
929  // Load accumulator with undefined.
930  __ LoadRoot(kInterpreterAccumulatorRegister, RootIndex::kUndefinedValue);
931 
932  // Load the dispatch table into a register and dispatch to the bytecode
933  // handler at the current bytecode offset.
934  Label do_dispatch;
935  __ bind(&do_dispatch);
936  __ Move(
937  kInterpreterDispatchTableRegister,
938  ExternalReference::interpreter_dispatch_table_address(masm->isolate()));
939  __ ldrb(r4, MemOperand(kInterpreterBytecodeArrayRegister,
940  kInterpreterBytecodeOffsetRegister));
941  __ ldr(
942  kJavaScriptCallCodeStartRegister,
943  MemOperand(kInterpreterDispatchTableRegister, r4, LSL, kPointerSizeLog2));
944  __ Call(kJavaScriptCallCodeStartRegister);
945  masm->isolate()->heap()->SetInterpreterEntryReturnPCOffset(masm->pc_offset());
946 
947  // Any returns to the entry trampoline are either due to the return bytecode
948  // or the interpreter tail calling a builtin and then a dispatch.
949 
950  // Get bytecode array and bytecode offset from the stack frame.
951  __ ldr(kInterpreterBytecodeArrayRegister,
952  MemOperand(fp, InterpreterFrameConstants::kBytecodeArrayFromFp));
953  __ ldr(kInterpreterBytecodeOffsetRegister,
954  MemOperand(fp, InterpreterFrameConstants::kBytecodeOffsetFromFp));
955  __ SmiUntag(kInterpreterBytecodeOffsetRegister);
956 
957  // Either return, or advance to the next bytecode and dispatch.
958  Label do_return;
959  __ ldrb(r1, MemOperand(kInterpreterBytecodeArrayRegister,
960  kInterpreterBytecodeOffsetRegister));
961  AdvanceBytecodeOffsetOrReturn(masm, kInterpreterBytecodeArrayRegister,
962  kInterpreterBytecodeOffsetRegister, r1, r2,
963  &do_return);
964  __ jmp(&do_dispatch);
965 
966  __ bind(&do_return);
967  // The return value is in r0.
968  LeaveInterpreterFrame(masm, r2);
969  __ Jump(lr);
970 }
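// Dispatch model: bytecode handlers do not return to their caller but to
// the point recorded by SetInterpreterEntryReturnPCOffset above, so after
// every handler the trampoline re-loads the bytecode array and offset
// from the frame, then either leaves on a return bytecode or advances the
// offset and dispatches the next handler.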
971 
972 static void Generate_InterpreterPushArgs(MacroAssembler* masm,
973  Register num_args, Register index,
974  Register limit, Register scratch) {
975  // Find the address of the last argument.
976  __ mov(limit, num_args);
977  __ mov(limit, Operand(limit, LSL, kPointerSizeLog2));
978  __ sub(limit, index, limit);
979 
980  Label loop_header, loop_check;
981  __ b(al, &loop_check);
982  __ bind(&loop_header);
983  __ ldr(scratch, MemOperand(index, -kPointerSize, PostIndex));
984  __ push(scratch);
985  __ bind(&loop_check);
986  __ cmp(index, limit);
987  __ b(gt, &loop_header);
988 }
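// The loop above computes limit = index - num_args * kPointerSize, then
// walks downward from `index` with a post-decrementing load, pushing one
// slot per iteration until index reaches limit, i.e. exactly num_args
// machine words are transferred to the stack.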
989 
990 // static
991 void Builtins::Generate_InterpreterPushArgsThenCallImpl(
992  MacroAssembler* masm, ConvertReceiverMode receiver_mode,
993  InterpreterPushArgsMode mode) {
994  DCHECK(mode != InterpreterPushArgsMode::kArrayFunction);
995  // ----------- S t a t e -------------
996  // -- r0 : the number of arguments (not including the receiver)
997  // -- r2 : the address of the first argument to be pushed. Subsequent
998  // arguments should be consecutive above this, in the same order as
999  // they are to be pushed onto the stack.
1000  // -- r1 : the target to call (can be any Object).
1001  // -----------------------------------
1002  Label stack_overflow;
1003 
1004  __ add(r3, r0, Operand(1)); // Add one for receiver.
1005 
1006  Generate_StackOverflowCheck(masm, r3, r4, &stack_overflow);
1007 
1008  // Push "undefined" as the receiver arg if we need to.
1009  if (receiver_mode == ConvertReceiverMode::kNullOrUndefined) {
1010  __ PushRoot(RootIndex::kUndefinedValue);
1011  __ mov(r3, r0); // Argument count is correct.
1012  }
1013 
1014  // Push the arguments. r2, r4, r5 will be modified.
1015  Generate_InterpreterPushArgs(masm, r3, r2, r4, r5);
1016 
1017  if (mode == InterpreterPushArgsMode::kWithFinalSpread) {
1018  __ Pop(r2); // Pass the spread in a register
1019  __ sub(r0, r0, Operand(1)); // Subtract one for spread
1020  }
1021 
1022  // Call the target.
1023  if (mode == InterpreterPushArgsMode::kWithFinalSpread) {
1024  __ Jump(BUILTIN_CODE(masm->isolate(), CallWithSpread),
1025  RelocInfo::CODE_TARGET);
1026  } else {
1027  __ Jump(masm->isolate()->builtins()->Call(ConvertReceiverMode::kAny),
1028  RelocInfo::CODE_TARGET);
1029  }
1030 
1031  __ bind(&stack_overflow);
1032  {
1033  __ TailCallRuntime(Runtime::kThrowStackOverflow);
1034  // Unreachable code.
1035  __ bkpt(0);
1036  }
1037 }
1038 
1039 // static
1040 void Builtins::Generate_InterpreterPushArgsThenConstructImpl(
1041  MacroAssembler* masm, InterpreterPushArgsMode mode) {
1042  // ----------- S t a t e -------------
1043  // -- r0 : argument count (not including receiver)
1044  // -- r3 : new target
1045  // -- r1 : constructor to call
1046  // -- r2 : allocation site feedback if available, undefined otherwise.
1047  // -- r4 : address of the first argument
1048  // -----------------------------------
1049  Label stack_overflow;
1050 
1051  // Push a slot for the receiver to be constructed.
1052  __ mov(r5, Operand::Zero());
1053  __ push(r5);
1054 
1055  Generate_StackOverflowCheck(masm, r0, r5, &stack_overflow);
1056 
1057  // Push the arguments. r5, r4, r6 will be modified.
1058  Generate_InterpreterPushArgs(masm, r0, r4, r5, r6);
1059 
1060  if (mode == InterpreterPushArgsMode::kWithFinalSpread) {
1061  __ Pop(r2); // Pass the spread in a register
1062  __ sub(r0, r0, Operand(1)); // Subtract one for spread
1063  } else {
1064  __ AssertUndefinedOrAllocationSite(r2, r5);
1065  }
1066 
1067  if (mode == InterpreterPushArgsMode::kArrayFunction) {
1068  __ AssertFunction(r1);
1069 
1070  // Tail call to the array construct stub (still in the caller
1071  // context at this point).
1072  Handle<Code> code = BUILTIN_CODE(masm->isolate(), ArrayConstructorImpl);
1073  __ Jump(code, RelocInfo::CODE_TARGET);
1074  } else if (mode == InterpreterPushArgsMode::kWithFinalSpread) {
1075  // Call the constructor with r0, r1, and r3 unmodified.
1076  __ Jump(BUILTIN_CODE(masm->isolate(), ConstructWithSpread),
1077  RelocInfo::CODE_TARGET);
1078  } else {
1079  DCHECK_EQ(InterpreterPushArgsMode::kOther, mode);
1080  // Call the constructor with r0, r1, and r3 unmodified.
1081  __ Jump(BUILTIN_CODE(masm->isolate(), Construct), RelocInfo::CODE_TARGET);
1082  }
1083 
1084  __ bind(&stack_overflow);
1085  {
1086  __ TailCallRuntime(Runtime::kThrowStackOverflow);
1087  // Unreachable code.
1088  __ bkpt(0);
1089  }
1090 }
1091 
1092 static void Generate_InterpreterEnterBytecode(MacroAssembler* masm) {
1093  // Set the return address to the correct point in the interpreter entry
1094  // trampoline.
1095  Label builtin_trampoline, trampoline_loaded;
1096  Smi interpreter_entry_return_pc_offset(
1097  masm->isolate()->heap()->interpreter_entry_return_pc_offset());
1098  DCHECK_NE(interpreter_entry_return_pc_offset, Smi::kZero);
1099 
1100  // If the SFI function_data is an InterpreterData, the function will have a
1101  // custom copy of the interpreter entry trampoline for profiling. If so,
1102  // get the custom trampoline, otherwise grab the entry address of the global
1103  // trampoline.
1104  __ ldr(r2, MemOperand(fp, StandardFrameConstants::kFunctionOffset));
1105  __ ldr(r2, FieldMemOperand(r2, JSFunction::kSharedFunctionInfoOffset));
1106  __ ldr(r2, FieldMemOperand(r2, SharedFunctionInfo::kFunctionDataOffset));
1107  __ CompareObjectType(r2, kInterpreterDispatchTableRegister,
1108  kInterpreterDispatchTableRegister,
1109  INTERPRETER_DATA_TYPE);
1110  __ b(ne, &builtin_trampoline);
1111 
1112  __ ldr(r2,
1113  FieldMemOperand(r2, InterpreterData::kInterpreterTrampolineOffset));
1114  __ add(r2, r2, Operand(Code::kHeaderSize - kHeapObjectTag));
1115  __ b(&trampoline_loaded);
1116 
1117  __ bind(&builtin_trampoline);
1118  __ Move(r2, ExternalReference::
1119  address_of_interpreter_entry_trampoline_instruction_start(
1120  masm->isolate()));
1121  __ ldr(r2, MemOperand(r2));
1122 
1123  __ bind(&trampoline_loaded);
1124  __ add(lr, r2, Operand(interpreter_entry_return_pc_offset->value()));
1125 
1126  // Initialize the dispatch table register.
1127  __ Move(
1128  kInterpreterDispatchTableRegister,
1129  ExternalReference::interpreter_dispatch_table_address(masm->isolate()));
1130 
1131  // Get the bytecode array pointer from the frame.
1132  __ ldr(kInterpreterBytecodeArrayRegister,
1133  MemOperand(fp, InterpreterFrameConstants::kBytecodeArrayFromFp));
1134 
1135  if (FLAG_debug_code) {
1136  // Check function data field is actually a BytecodeArray object.
1137  __ SmiTst(kInterpreterBytecodeArrayRegister);
1138  __ Assert(
1139  ne, AbortReason::kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry);
1140  __ CompareObjectType(kInterpreterBytecodeArrayRegister, r1, no_reg,
1141  BYTECODE_ARRAY_TYPE);
1142  __ Assert(
1143  eq, AbortReason::kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry);
1144  }
1145 
1146  // Get the target bytecode offset from the frame.
1147  __ ldr(kInterpreterBytecodeOffsetRegister,
1148  MemOperand(fp, InterpreterFrameConstants::kBytecodeOffsetFromFp));
1149  __ SmiUntag(kInterpreterBytecodeOffsetRegister);
1150 
1151  // Dispatch to the target bytecode.
1152  UseScratchRegisterScope temps(masm);
1153  Register scratch = temps.Acquire();
1154  __ ldrb(scratch, MemOperand(kInterpreterBytecodeArrayRegister,
1155  kInterpreterBytecodeOffsetRegister));
1156  __ ldr(kJavaScriptCallCodeStartRegister,
1157  MemOperand(kInterpreterDispatchTableRegister, scratch, LSL,
1158  kPointerSizeLog2));
1159  __ Jump(kJavaScriptCallCodeStartRegister);
1160 }
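// The trick here: lr is set to (trampoline entry + the recorded return-PC
// offset) before jumping straight to the target bytecode's handler, so
// when that handler "returns" it lands in the dispatch tail of
// InterpreterEntryTrampoline exactly as if it had been called from there.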
1161 
1162 void Builtins::Generate_InterpreterEnterBytecodeAdvance(MacroAssembler* masm) {
1163  // Get bytecode array and bytecode offset from the stack frame.
1164  __ ldr(kInterpreterBytecodeArrayRegister,
1165  MemOperand(fp, InterpreterFrameConstants::kBytecodeArrayFromFp));
1166  __ ldr(kInterpreterBytecodeOffsetRegister,
1167  MemOperand(fp, InterpreterFrameConstants::kBytecodeOffsetFromFp));
1168  __ SmiUntag(kInterpreterBytecodeOffsetRegister);
1169 
1170  // Load the current bytecode.
1171  __ ldrb(r1, MemOperand(kInterpreterBytecodeArrayRegister,
1172  kInterpreterBytecodeOffsetRegister));
1173 
1174  // Advance to the next bytecode.
1175  Label if_return;
1176  AdvanceBytecodeOffsetOrReturn(masm, kInterpreterBytecodeArrayRegister,
1177  kInterpreterBytecodeOffsetRegister, r1, r2,
1178  &if_return);
1179 
1180  // Convert new bytecode offset to a Smi and save in the stackframe.
1181  __ SmiTag(r2, kInterpreterBytecodeOffsetRegister);
1182  __ str(r2, MemOperand(fp, InterpreterFrameConstants::kBytecodeOffsetFromFp));
1183 
1184  Generate_InterpreterEnterBytecode(masm);
1185 
1186  // We should never take the if_return path.
1187  __ bind(&if_return);
1188  __ Abort(AbortReason::kInvalidBytecodeAdvance);
1189 }
1190 
1191 void Builtins::Generate_InterpreterEnterBytecodeDispatch(MacroAssembler* masm) {
1192  Generate_InterpreterEnterBytecode(masm);
1193 }
1194 
1195 void Builtins::Generate_InstantiateAsmJs(MacroAssembler* masm) {
1196  // ----------- S t a t e -------------
1197  // -- r0 : argument count (preserved for callee)
1198  // -- r1 : new target (preserved for callee)
1199  // -- r3 : target function (preserved for callee)
1200  // -----------------------------------
1201  Label failed;
1202  {
1203  FrameScope scope(masm, StackFrame::INTERNAL);
1204  // Preserve argument count for later compare.
1205  __ Move(r4, r0);
1206  // Push the number of arguments to the callee.
1207  __ SmiTag(r0);
1208  __ push(r0);
1209  // Push a copy of the target function and the new target.
1210  __ push(r1);
1211  __ push(r3);
1212 
1213  // The function.
1214  __ push(r1);
1215  // Copy arguments from caller (stdlib, foreign, heap).
1216  Label args_done;
1217  for (int j = 0; j < 4; ++j) {
1218  Label over;
1219  if (j < 3) {
1220  __ cmp(r4, Operand(j));
1221  __ b(ne, &over);
1222  }
1223  for (int i = j - 1; i >= 0; --i) {
1224  __ ldr(r4, MemOperand(fp, StandardFrameConstants::kCallerSPOffset +
1225  i * kPointerSize));
1226  __ push(r4);
1227  }
1228  for (int i = 0; i < 3 - j; ++i) {
1229  __ PushRoot(RootIndex::kUndefinedValue);
1230  }
1231  if (j < 3) {
1232  __ jmp(&args_done);
1233  __ bind(&over);
1234  }
1235  }
1236  __ bind(&args_done);
1237 
1238  // Call the runtime; on success, unwind this frame and the parent frame.
1239  __ CallRuntime(Runtime::kInstantiateAsmJs, 4);
1240  // A smi 0 is returned on failure, an object on success.
1241  __ JumpIfSmi(r0, &failed);
1242 
1243  __ Drop(2);
1244  __ pop(r4);
1245  __ SmiUntag(r4);
1246  scope.GenerateLeaveFrame();
1247 
1248  __ add(r4, r4, Operand(1));
1249  __ Drop(r4);
1250  __ Ret();
1251 
1252  __ bind(&failed);
1253  // Restore target function and new target.
1254  __ pop(r3);
1255  __ pop(r1);
1256  __ pop(r0);
1257  __ SmiUntag(r0);
1258  }
1259  // On failure, tail call back to regular JS by re-calling the function,
1260  // which has been reset to the CompileLazy builtin.
1261  static_assert(kJavaScriptCallCodeStartRegister == r2, "ABI mismatch");
1262  __ ldr(r2, FieldMemOperand(r1, JSFunction::kCodeOffset));
1263  __ add(r2, r2, Operand(Code::kHeaderSize - kHeapObjectTag));
1264  __ Jump(r2);
1265 }
1266 
1267 namespace {
1268 void Generate_ContinueToBuiltinHelper(MacroAssembler* masm,
1269  bool java_script_builtin,
1270  bool with_result) {
1271  const RegisterConfiguration* config(RegisterConfiguration::Default());
1272  int allocatable_register_count = config->num_allocatable_general_registers();
1273  if (with_result) {
1274  // Overwrite the hole inserted by the deoptimizer with the return value from
1275  // the LAZY deopt point.
1276  __ str(r0,
1277  MemOperand(
1278  sp, config->num_allocatable_general_registers() * kPointerSize +
1279  BuiltinContinuationFrameConstants::kFixedFrameSize));
1280  }
1281  for (int i = allocatable_register_count - 1; i >= 0; --i) {
1282  int code = config->GetAllocatableGeneralCode(i);
1283  __ Pop(Register::from_code(code));
1284  if (java_script_builtin && code == kJavaScriptCallArgCountRegister.code()) {
1285  __ SmiUntag(Register::from_code(code));
1286  }
1287  }
1288  __ ldr(fp, MemOperand(
1289  sp, BuiltinContinuationFrameConstants::kFixedFrameSizeFromFp));
1290 
1291  UseScratchRegisterScope temps(masm);
1292  Register scratch = temps.Acquire();
1293  __ Pop(scratch);
1294  __ add(sp, sp,
1295  Operand(BuiltinContinuationFrameConstants::kFixedFrameSizeFromFp));
1296  __ Pop(lr);
1297  __ add(pc, scratch, Operand(Code::kHeaderSize - kHeapObjectTag));
1298 }
1299 } // namespace
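// Continuation frames in brief: the deoptimizer materializes a frame
// holding every allocatable register (plus, for the WithResult variants,
// a slot that receives the value of the lazily-deoptimized call), the
// builtin's Code object, and the saved fp/lr. The helper unwinds it in
// reverse: restore the registers, reload fp, pop the Code object, drop
// the fixed frame, pop lr, and jump to the builtin's first instruction.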
1300 
1301 void Builtins::Generate_ContinueToCodeStubBuiltin(MacroAssembler* masm) {
1302  Generate_ContinueToBuiltinHelper(masm, false, false);
1303 }
1304 
1305 void Builtins::Generate_ContinueToCodeStubBuiltinWithResult(
1306  MacroAssembler* masm) {
1307  Generate_ContinueToBuiltinHelper(masm, false, true);
1308 }
1309 
1310 void Builtins::Generate_ContinueToJavaScriptBuiltin(MacroAssembler* masm) {
1311  Generate_ContinueToBuiltinHelper(masm, true, false);
1312 }
1313 
1314 void Builtins::Generate_ContinueToJavaScriptBuiltinWithResult(
1315  MacroAssembler* masm) {
1316  Generate_ContinueToBuiltinHelper(masm, true, true);
1317 }
1318 
1319 void Builtins::Generate_NotifyDeoptimized(MacroAssembler* masm) {
1320  {
1321  FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
1322  __ CallRuntime(Runtime::kNotifyDeoptimized);
1323  }
1324 
1325  DCHECK_EQ(kInterpreterAccumulatorRegister.code(), r0.code());
1326  __ pop(r0);
1327  __ Ret();
1328 }
1329 
1330 void Builtins::Generate_InterpreterOnStackReplacement(MacroAssembler* masm) {
1331  // Lookup the function in the JavaScript frame.
1332  __ ldr(r0, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
1333  __ ldr(r0, MemOperand(r0, JavaScriptFrameConstants::kFunctionOffset));
1334 
1335  {
1336  FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
1337  // Pass function as argument.
1338  __ push(r0);
1339  __ CallRuntime(Runtime::kCompileForOnStackReplacement);
1340  }
1341 
1342  // If the code object is null, just return to the caller.
1343  Label skip;
1344  __ cmp(r0, Operand(Smi::zero()));
1345  __ b(ne, &skip);
1346  __ Ret();
1347 
1348  __ bind(&skip);
1349 
1350  // Drop the handler frame that may be sitting on top of the actual
1351  // JavaScript frame. This is the case when OSR is triggered from bytecode.
1352  __ LeaveFrame(StackFrame::STUB);
1353 
1354  // Load deoptimization data from the code object.
1355  // <deopt_data> = <code>[#deoptimization_data_offset]
1356  __ ldr(r1, FieldMemOperand(r0, Code::kDeoptimizationDataOffset));
1357 
1358  {
1359  ConstantPoolUnavailableScope constant_pool_unavailable(masm);
1360  __ add(r0, r0, Operand(Code::kHeaderSize - kHeapObjectTag)); // Code start
1361 
1362  // Load the OSR entrypoint offset from the deoptimization data.
1363  // <osr_offset> = <deopt_data>[#header_size + #osr_pc_offset]
1364  __ ldr(r1, FieldMemOperand(r1, FixedArray::OffsetOfElementAt(
1365  DeoptimizationData::kOsrPcOffsetIndex)));
1366 
1367  // Compute the target address = code start + osr_offset
1368  __ add(lr, r0, Operand::SmiUntag(r1));
1369 
1370  // And "return" to the OSR entry point of the function.
1371  __ Ret();
1372  }
1373 }
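// OSR hand-off sketch: the runtime returns either Smi::zero() (keep
// running in the interpreter) or an optimized Code object whose
// DeoptimizationData records, at kOsrPcOffsetIndex, the PC offset of the
// loop entry. Setting lr to code start + that offset and executing Ret()
// "returns" directly into the middle of the optimized code.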
1374 
1375 // static
1376 void Builtins::Generate_FunctionPrototypeApply(MacroAssembler* masm) {
1377  // ----------- S t a t e -------------
1378  // -- r0 : argc
1379  // -- sp[0] : argArray
1380  // -- sp[4] : thisArg
1381  // -- sp[8] : receiver
1382  // -----------------------------------
1383 
1384  // 1. Load receiver into r1, argArray into r2 (if present), remove all
1385  // arguments from the stack (including the receiver), and push thisArg (if
1386  // present) instead.
1387  {
1388  __ LoadRoot(r5, RootIndex::kUndefinedValue);
1389  __ mov(r2, r5);
1390  __ ldr(r1, MemOperand(sp, r0, LSL, kPointerSizeLog2)); // receiver
1391  __ sub(r4, r0, Operand(1), SetCC);
1392  __ ldr(r5, MemOperand(sp, r4, LSL, kPointerSizeLog2), ge); // thisArg
1393  __ sub(r4, r4, Operand(1), SetCC, ge);
1394  __ ldr(r2, MemOperand(sp, r4, LSL, kPointerSizeLog2), ge); // argArray
1395  __ add(sp, sp, Operand(r0, LSL, kPointerSizeLog2));
1396  __ str(r5, MemOperand(sp, 0));
1397  }
1398 
1399  // ----------- S t a t e -------------
1400  // -- r2 : argArray
1401  // -- r1 : receiver
1402  // -- sp[0] : thisArg
1403  // -----------------------------------
1404 
1405  // 2. We don't need to check explicitly for callable receiver here,
1406  // since that's the first thing the Call/CallWithArrayLike builtins
1407  // will do.
1408 
1409  // 3. Tail call with no arguments if argArray is null or undefined.
1410  Label no_arguments;
1411  __ JumpIfRoot(r2, RootIndex::kNullValue, &no_arguments);
1412  __ JumpIfRoot(r2, RootIndex::kUndefinedValue, &no_arguments);
1413 
1414  // 4a. Apply the receiver to the given argArray.
1415  __ Jump(BUILTIN_CODE(masm->isolate(), CallWithArrayLike),
1416  RelocInfo::CODE_TARGET);
1417 
1418  // 4b. The argArray is either null or undefined, so we tail call without any
1419  // arguments to the receiver.
1420  __ bind(&no_arguments);
1421  {
1422  __ mov(r0, Operand(0));
1423  __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
1424  }
1425 }
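// The ge-conditional ldr/sub chain above reads thisArg and argArray only
// when enough actual arguments exist: each sub ... SetCC decrements the
// remaining-argument counter, and the following predicated instructions
// execute only while it is still non-negative. This implements apply's
// optional parameters without a single branch.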
1426 
1427 // static
1428 void Builtins::Generate_FunctionPrototypeCall(MacroAssembler* masm) {
1429  // 1. Make sure we have at least one argument.
1430  // r0: actual number of arguments
1431  {
1432  Label done;
1433  __ cmp(r0, Operand::Zero());
1434  __ b(ne, &done);
1435  __ PushRoot(RootIndex::kUndefinedValue);
1436  __ add(r0, r0, Operand(1));
1437  __ bind(&done);
1438  }
1439 
1440  // 2. Get the callable to call (passed as receiver) from the stack.
1441  // r0: actual number of arguments
1442  __ ldr(r1, MemOperand(sp, r0, LSL, kPointerSizeLog2));
1443 
1444  // 3. Shift the arguments one slot toward the receiver slot, overwriting
1445  // the original receiver (on ARM the return address stays in lr). Adjust
1446  // the argument count to make the original first argument the new receiver.
1447  // r0: actual number of arguments
1448  // r1: callable
1449  {
1450  Register scratch = r3;
1451  Label loop;
1452  // Calculate the copy start address (destination). Copy end address is sp.
1453  __ add(r2, sp, Operand(r0, LSL, kPointerSizeLog2));
1454 
1455  __ bind(&loop);
1456  __ ldr(scratch, MemOperand(r2, -kPointerSize));
1457  __ str(scratch, MemOperand(r2));
1458  __ sub(r2, r2, Operand(kPointerSize));
1459  __ cmp(r2, sp);
1460  __ b(ne, &loop);
1461  // Adjust the actual number of arguments and remove the top element
1462  // (which is a copy of the last argument).
1463  __ sub(r0, r0, Operand(1));
1464  __ pop();
1465  }
1466 
1467  // 4. Call the callable.
1468  __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
1469 }
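// Worked example with argc == 2, i.e. f.call(a, b): on entry sp[8] = f
// (the receiver, already loaded into r1), sp[4] = a, sp[0] = b. The loop
// copies a into sp[8] and b into sp[4], the stale b at sp[0] is popped,
// and argc becomes 1, leaving a as the new receiver and b as the sole
// argument for Call.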
1470 
1471 void Builtins::Generate_ReflectApply(MacroAssembler* masm) {
1472  // ----------- S t a t e -------------
1473  // -- r0 : argc
1474  // -- sp[0] : argumentsList
1475  // -- sp[4] : thisArgument
1476  // -- sp[8] : target
1477  // -- sp[12] : receiver
1478  // -----------------------------------
1479 
1480  // 1. Load target into r1 (if present), argumentsList into r2 (if present),
1481  // remove all arguments from the stack (including the receiver), and push
1482  // thisArgument (if present) instead.
1483  {
1484  __ LoadRoot(r1, RootIndex::kUndefinedValue);
1485  __ mov(r5, r1);
1486  __ mov(r2, r1);
1487  __ sub(r4, r0, Operand(1), SetCC);
1488  __ ldr(r1, MemOperand(sp, r4, LSL, kPointerSizeLog2), ge); // target
1489  __ sub(r4, r4, Operand(1), SetCC, ge);
1490  __ ldr(r5, MemOperand(sp, r4, LSL, kPointerSizeLog2), ge); // thisArgument
1491  __ sub(r4, r4, Operand(1), SetCC, ge);
1492  __ ldr(r2, MemOperand(sp, r4, LSL, kPointerSizeLog2), ge); // argumentsList
1493  __ add(sp, sp, Operand(r0, LSL, kPointerSizeLog2));
1494  __ str(r5, MemOperand(sp, 0));
1495  }
1496 
1497  // ----------- S t a t e -------------
1498  // -- r2 : argumentsList
1499  // -- r1 : target
1500  // -- sp[0] : thisArgument
1501  // -----------------------------------
1502 
1503  // 2. We don't need to check explicitly for callable target here,
1504  // since that's the first thing the Call/CallWithArrayLike builtins
1505  // will do.
1506 
1507  // 3. Apply the target to the given argumentsList.
1508  __ Jump(BUILTIN_CODE(masm->isolate(), CallWithArrayLike),
1509  RelocInfo::CODE_TARGET);
1510 }
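// Illustrative JS-level sketch (an assumption): the builtin behaves roughly
// like
//
//   Reflect.apply = function (target, thisArgument, argumentsList) {
//     return CallWithArrayLike(target, thisArgument, argumentsList);
//   };
//
// Unlike Function.prototype.apply there is no null/undefined fast path for
// argumentsList; CallWithArrayLike throws for non-array-like input.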
1511 
1512 void Builtins::Generate_ReflectConstruct(MacroAssembler* masm) {
1513  // ----------- S t a t e -------------
1514  // -- r0 : argc
1515  // -- sp[0] : new.target (optional)
1516  // -- sp[4] : argumentsList
1517  // -- sp[8] : target
1518  // -- sp[12] : receiver
1519  // -----------------------------------
1520 
1521  // 1. Load target into r1 (if present), argumentsList into r2 (if present),
1522  // new.target into r3 (if present, otherwise use target), remove all
1523  // arguments from the stack (including the receiver), and push thisArgument
1524  // (if present) instead.
1525  {
1526  __ LoadRoot(r1, RootIndex::kUndefinedValue);
1527  __ mov(r2, r1);
1528  __ str(r2, MemOperand(sp, r0, LSL, kPointerSizeLog2)); // receiver
1529  __ sub(r4, r0, Operand(1), SetCC);
1530  __ ldr(r1, MemOperand(sp, r4, LSL, kPointerSizeLog2), ge); // target
1531  __ mov(r3, r1); // new.target defaults to target
1532  __ sub(r4, r4, Operand(1), SetCC, ge);
1533  __ ldr(r2, MemOperand(sp, r4, LSL, kPointerSizeLog2), ge); // argumentsList
1534  __ sub(r4, r4, Operand(1), SetCC, ge);
1535  __ ldr(r3, MemOperand(sp, r4, LSL, kPointerSizeLog2), ge); // new.target
1536  __ add(sp, sp, Operand(r0, LSL, kPointerSizeLog2));
1537  }
1538 
1539  // ----------- S t a t e -------------
1540  // -- r2 : argumentsList
1541  // -- r3 : new.target
1542  // -- r1 : target
1543  // -- sp[0] : receiver (undefined)
1544  // -----------------------------------
1545 
1546  // 2. We don't need to check explicitly for constructor target here,
1547  // since that's the first thing the Construct/ConstructWithArrayLike
1548  // builtins will do.
1549 
1550  // 3. We don't need to check explicitly for constructor new.target here,
1551  // since that's the second thing the Construct/ConstructWithArrayLike
1552  // builtins will do.
1553 
1554  // 4. Construct the target with the given new.target and argumentsList.
1555  __ Jump(BUILTIN_CODE(masm->isolate(), ConstructWithArrayLike),
1556  RelocInfo::CODE_TARGET);
1557 }
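// Illustrative JS-level sketch (an assumption):
//
//   Reflect.construct = function (target, argumentsList, newTarget = target) {
//     return ConstructWithArrayLike(target, newTarget, argumentsList);
//   };
//
// matching the register setup above, where r3 (new.target) defaults to r1
// (target) when fewer than three arguments are supplied.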
1558 
1559 static void EnterArgumentsAdaptorFrame(MacroAssembler* masm) {
1560  __ SmiTag(r0);
1561  __ mov(r4, Operand(StackFrame::TypeToMarker(StackFrame::ARGUMENTS_ADAPTOR)));
1562  __ stm(db_w, sp, r0.bit() | r1.bit() | r4.bit() |
1563  fp.bit() | lr.bit());
1564  __ Push(Smi::zero()); // Padding.
1565  __ add(fp, sp,
1566  Operand(ArgumentsAdaptorFrameConstants::kFixedFrameSizeFromFp));
1567 }
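// Resulting adaptor frame layout (a sketch, assuming kPointerSize == 4):
//
//   fp + 4  : saved lr (return address)
//   fp + 0  : saved fp (caller's frame pointer)
//   fp - 4  : ARGUMENTS_ADAPTOR frame type marker
//   fp - 8  : function (r1)
//   fp - 12 : actual argument count, smi-tagged (r0)
//   fp - 16 : padding (Smi::zero)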
1568 
1569 static void LeaveArgumentsAdaptorFrame(MacroAssembler* masm) {
1570  // ----------- S t a t e -------------
1571  // -- r0 : result being passed through
1572  // -----------------------------------
1573  // Get the number of arguments passed (as a smi), tear down the frame and
1574  // then tear down the parameters.
1575  __ ldr(r1, MemOperand(fp, ArgumentsAdaptorFrameConstants::kLengthOffset));
1576 
1577  __ LeaveFrame(StackFrame::ARGUMENTS_ADAPTOR);
1578  __ add(sp, sp, Operand::PointerOffsetFromSmiKey(r1));
1579  __ add(sp, sp, Operand(kPointerSize)); // adjust for receiver
1580 }
1581 
1582 // static
1583 void Builtins::Generate_CallOrConstructVarargs(MacroAssembler* masm,
1584  Handle<Code> code) {
1585  // ----------- S t a t e -------------
1586  // -- r1 : target
1587  // -- r0 : number of parameters on the stack (not including the receiver)
1588  // -- r2 : arguments list (a FixedArray)
1589  // -- r4 : len (number of elements to push from args)
1590  // -- r3 : new.target (for [[Construct]])
1591  // -----------------------------------
1592  Register scratch = r8;
1593 
1594  if (masm->emit_debug_code()) {
1595  // Allow r2 to be a FixedArray, or a FixedDoubleArray if r4 == 0.
1596  Label ok, fail;
1597  __ AssertNotSmi(r2);
1598  __ ldr(scratch, FieldMemOperand(r2, HeapObject::kMapOffset));
1599  __ ldrh(r6, FieldMemOperand(scratch, Map::kInstanceTypeOffset));
1600  __ cmp(r6, Operand(FIXED_ARRAY_TYPE));
1601  __ b(eq, &ok);
1602  __ cmp(r6, Operand(FIXED_DOUBLE_ARRAY_TYPE));
1603  __ b(ne, &fail);
1604  __ cmp(r4, Operand(0));
1605  __ b(eq, &ok);
1606  // Fall through.
1607  __ bind(&fail);
1608  __ Abort(AbortReason::kOperandIsNotAFixedArray);
1609 
1610  __ bind(&ok);
1611  }
1612 
1613  Label stack_overflow;
1614  Generate_StackOverflowCheck(masm, r4, scratch, &stack_overflow);
1615 
1616  // Push arguments onto the stack (thisArgument is already on the stack).
1617  {
1618  __ mov(r6, Operand(0));
1619  __ LoadRoot(r5, RootIndex::kTheHoleValue);
1620  Label done, loop;
1621  __ bind(&loop);
1622  __ cmp(r6, r4);
1623  __ b(eq, &done);
1624  __ add(scratch, r2, Operand(r6, LSL, kPointerSizeLog2));
1625  __ ldr(scratch, FieldMemOperand(scratch, FixedArray::kHeaderSize));
1626  __ cmp(scratch, r5);
1627  __ LoadRoot(scratch, RootIndex::kUndefinedValue, eq);
1628  __ Push(scratch);
1629  __ add(r6, r6, Operand(1));
1630  __ b(&loop);
1631  __ bind(&done);
1632  __ add(r0, r0, r6);
1633  }
1634 
1635  // Tail-call to the actual Call or Construct builtin.
1636  __ Jump(code, RelocInfo::CODE_TARGET);
1637 
1638  __ bind(&stack_overflow);
1639  __ TailCallRuntime(Runtime::kThrowStackOverflow);
1640 }
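// The push loop above corresponds roughly to this sketch (pseudocode, an
// assumption):
//
//   for (int i = 0; i < len /* r4 */; i++) {
//     Object element = args->get(i);                     // FixedArray slot
//     Push(element == the_hole ? undefined : element);   // holes -> undefined
//   }
//   r0 += len;                                           // new argument count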
1641 
1642 // static
1643 void Builtins::Generate_CallOrConstructForwardVarargs(MacroAssembler* masm,
1644  CallOrConstructMode mode,
1645  Handle<Code> code) {
1646  // ----------- S t a t e -------------
1647  // -- r0 : the number of arguments (not including the receiver)
1648  // -- r3 : the new.target (for [[Construct]] calls)
1649  // -- r1 : the target to call (can be any Object)
1650  // -- r2 : start index (to support rest parameters)
1651  // -----------------------------------
1652 
1653  Register scratch = r6;
1654 
1655  // Check if new.target has a [[Construct]] internal method.
1656  if (mode == CallOrConstructMode::kConstruct) {
1657  Label new_target_constructor, new_target_not_constructor;
1658  __ JumpIfSmi(r3, &new_target_not_constructor);
1659  __ ldr(scratch, FieldMemOperand(r3, HeapObject::kMapOffset));
1660  __ ldrb(scratch, FieldMemOperand(scratch, Map::kBitFieldOffset));
1661  __ tst(scratch, Operand(Map::IsConstructorBit::kMask));
1662  __ b(ne, &new_target_constructor);
1663  __ bind(&new_target_not_constructor);
1664  {
1665  FrameScope scope(masm, StackFrame::MANUAL);
1666  __ EnterFrame(StackFrame::INTERNAL);
1667  __ Push(r3);
1668  __ CallRuntime(Runtime::kThrowNotConstructor);
1669  }
1670  __ bind(&new_target_constructor);
1671  }
1672 
1673  // Check if we have an arguments adaptor frame below the function frame.
1674  Label arguments_adaptor, arguments_done;
1675  __ ldr(r4, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
1676  __ ldr(scratch,
1677  MemOperand(r4, CommonFrameConstants::kContextOrFrameTypeOffset));
1678  __ cmp(scratch,
1679  Operand(StackFrame::TypeToMarker(StackFrame::ARGUMENTS_ADAPTOR)));
1680  __ b(eq, &arguments_adaptor);
1681  {
1682  __ ldr(r5, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
1683  __ ldr(r5, FieldMemOperand(r5, JSFunction::kSharedFunctionInfoOffset));
1684  __ ldrh(r5, FieldMemOperand(
1685  r5, SharedFunctionInfo::kFormalParameterCountOffset));
1686  __ mov(r4, fp);
1687  }
1688  __ b(&arguments_done);
1689  __ bind(&arguments_adaptor);
1690  {
1691  // Load the length from the ArgumentsAdaptorFrame.
1692  __ ldr(r5, MemOperand(r4, ArgumentsAdaptorFrameConstants::kLengthOffset));
1693  __ SmiUntag(r5);
1694  }
1695  __ bind(&arguments_done);
1696 
1697  Label stack_done, stack_overflow;
1698  __ sub(r5, r5, r2, SetCC);
1699  __ b(le, &stack_done);
1700  {
1701  // Check for stack overflow.
1702  Generate_StackOverflowCheck(masm, r5, r2, &stack_overflow);
1703 
1704  // Forward the arguments from the caller frame.
1705  {
1706  Label loop;
1707  __ add(r4, r4, Operand(kPointerSize));
1708  __ add(r0, r0, r5);
1709  __ bind(&loop);
1710  {
1711  __ ldr(scratch, MemOperand(r4, r5, LSL, kPointerSizeLog2));
1712  __ push(scratch);
1713  __ sub(r5, r5, Operand(1), SetCC);
1714  __ b(ne, &loop);
1715  }
1716  }
1717  }
1718  __ b(&stack_done);
1719  __ bind(&stack_overflow);
1720  __ TailCallRuntime(Runtime::kThrowStackOverflow);
1721  __ bind(&stack_done);
1722 
1723  // Tail-call to the {code} handler.
1724  __ Jump(code, RelocInfo::CODE_TARGET);
1725 }
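// Illustrative semantics (an assumption): for a rest-parameter forwarding
// call such as
//
//   function f(...args) { return g(...args); }
//
// the arguments are copied straight out of the caller's frame (or its
// arguments adaptor frame), starting at index r2, instead of materializing
// an intermediate array first.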
1726 
1727 // static
1728 void Builtins::Generate_CallFunction(MacroAssembler* masm,
1729  ConvertReceiverMode mode) {
1730  // ----------- S t a t e -------------
1731  // -- r0 : the number of arguments (not including the receiver)
1732  // -- r1 : the function to call (checked to be a JSFunction)
1733  // -----------------------------------
1734  __ AssertFunction(r1);
1735 
1736  // See ES6 section 9.2.1 [[Call]] ( thisArgument, argumentsList)
1737  // Check that the function is not a "classConstructor".
1738  Label class_constructor;
1739  __ ldr(r2, FieldMemOperand(r1, JSFunction::kSharedFunctionInfoOffset));
1740  __ ldr(r3, FieldMemOperand(r2, SharedFunctionInfo::kFlagsOffset));
1741  __ tst(r3, Operand(SharedFunctionInfo::IsClassConstructorBit::kMask));
1742  __ b(ne, &class_constructor);
1743 
1744  // Enter the context of the function; ToObject has to run in the function
1745  // context, and we also need to take the global proxy from the function
1746  // context in case of conversion.
1747  __ ldr(cp, FieldMemOperand(r1, JSFunction::kContextOffset));
1748  // We need to convert the receiver for non-native sloppy mode functions.
1749  Label done_convert;
1750  __ ldr(r3, FieldMemOperand(r2, SharedFunctionInfo::kFlagsOffset));
1751  __ tst(r3, Operand(SharedFunctionInfo::IsNativeBit::kMask |
1752  SharedFunctionInfo::IsStrictBit::kMask));
1753  __ b(ne, &done_convert);
1754  {
1755  // ----------- S t a t e -------------
1756  // -- r0 : the number of arguments (not including the receiver)
1757  // -- r1 : the function to call (checked to be a JSFunction)
1758  // -- r2 : the shared function info.
1759  // -- cp : the function context.
1760  // -----------------------------------
1761 
1762  if (mode == ConvertReceiverMode::kNullOrUndefined) {
1763  // Patch receiver to global proxy.
1764  __ LoadGlobalProxy(r3);
1765  } else {
1766  Label convert_to_object, convert_receiver;
1767  __ ldr(r3, MemOperand(sp, r0, LSL, kPointerSizeLog2));
1768  __ JumpIfSmi(r3, &convert_to_object);
1769  STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
1770  __ CompareObjectType(r3, r4, r4, FIRST_JS_RECEIVER_TYPE);
1771  __ b(hs, &done_convert);
1772  if (mode != ConvertReceiverMode::kNotNullOrUndefined) {
1773  Label convert_global_proxy;
1774  __ JumpIfRoot(r3, RootIndex::kUndefinedValue, &convert_global_proxy);
1775  __ JumpIfNotRoot(r3, RootIndex::kNullValue, &convert_to_object);
1776  __ bind(&convert_global_proxy);
1777  {
1778  // Patch receiver to global proxy.
1779  __ LoadGlobalProxy(r3);
1780  }
1781  __ b(&convert_receiver);
1782  }
1783  __ bind(&convert_to_object);
1784  {
1785  // Convert receiver using ToObject.
1786  // TODO(bmeurer): Inline the allocation here to avoid building the frame
1787  // in the fast case? (fall back to AllocateInNewSpace?)
1788  FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
1789  __ SmiTag(r0);
1790  __ Push(r0, r1);
1791  __ mov(r0, r3);
1792  __ Push(cp);
1793  __ Call(BUILTIN_CODE(masm->isolate(), ToObject),
1794  RelocInfo::CODE_TARGET);
1795  __ Pop(cp);
1796  __ mov(r3, r0);
1797  __ Pop(r0, r1);
1798  __ SmiUntag(r0);
1799  }
1800  __ ldr(r2, FieldMemOperand(r1, JSFunction::kSharedFunctionInfoOffset));
1801  __ bind(&convert_receiver);
1802  }
1803  __ str(r3, MemOperand(sp, r0, LSL, kPointerSizeLog2));
1804  }
1805  __ bind(&done_convert);
1806 
1807  // ----------- S t a t e -------------
1808  // -- r0 : the number of arguments (not including the receiver)
1809  // -- r1 : the function to call (checked to be a JSFunction)
1810  // -- r2 : the shared function info.
1811  // -- cp : the function context.
1812  // -----------------------------------
1813 
1814  __ ldrh(r2,
1815  FieldMemOperand(r2, SharedFunctionInfo::kFormalParameterCountOffset));
1816  ParameterCount actual(r0);
1817  ParameterCount expected(r2);
1818  __ InvokeFunctionCode(r1, no_reg, expected, actual, JUMP_FUNCTION);
1819 
1820  // The function is a "classConstructor", need to raise an exception.
1821  __ bind(&class_constructor);
1822  {
1823  FrameScope frame(masm, StackFrame::INTERNAL);
1824  __ push(r1);
1825  __ CallRuntime(Runtime::kThrowConstructorNonCallableError);
1826  }
1827 }
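// Receiver conversion summary (a sketch of ES6 9.2.1.2 OrdinaryCallBindThis):
//
//   strict or native function        : receiver left untouched
//   sloppy, receiver null/undefined  : receiver := global proxy
//   sloppy, receiver is a primitive  : receiver := ToObject(receiver)
//   sloppy, receiver is a JSReceiver : receiver left untouched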
1828 
1829 namespace {
1830 
1831 void Generate_PushBoundArguments(MacroAssembler* masm) {
1832  // ----------- S t a t e -------------
1833  // -- r0 : the number of arguments (not including the receiver)
1834  // -- r1 : target (checked to be a JSBoundFunction)
1835  // -- r3 : new.target (only in case of [[Construct]])
1836  // -----------------------------------
1837 
1838  // Load [[BoundArguments]] into r2 and length of that into r4.
1839  Label no_bound_arguments;
1840  __ ldr(r2, FieldMemOperand(r1, JSBoundFunction::kBoundArgumentsOffset));
1841  __ ldr(r4, FieldMemOperand(r2, FixedArray::kLengthOffset));
1842  __ SmiUntag(r4);
1843  __ cmp(r4, Operand(0));
1844  __ b(eq, &no_bound_arguments);
1845  {
1846  // ----------- S t a t e -------------
1847  // -- r0 : the number of arguments (not including the receiver)
1848  // -- r1 : target (checked to be a JSBoundFunction)
1849  // -- r2 : the [[BoundArguments]] (implemented as FixedArray)
1850  // -- r3 : new.target (only in case of [[Construct]])
1851  // -- r4 : the number of [[BoundArguments]]
1852  // -----------------------------------
1853 
1854  // Reserve stack space for the [[BoundArguments]].
1855  {
1856  Label done;
1857  __ sub(sp, sp, Operand(r4, LSL, kPointerSizeLog2));
1858  // Check the stack for overflow. We are not trying to catch interruptions
1859  // (i.e. debug break and preemption) here, so check the "real stack
1860  // limit".
1861  __ CompareRoot(sp, RootIndex::kRealStackLimit);
1862  __ b(hs, &done);
1863  // Restore the stack pointer.
1864  __ add(sp, sp, Operand(r4, LSL, kPointerSizeLog2));
1865  {
1866  FrameScope scope(masm, StackFrame::MANUAL);
1867  __ EnterFrame(StackFrame::INTERNAL);
1868  __ CallRuntime(Runtime::kThrowStackOverflow);
1869  }
1870  __ bind(&done);
1871  }
1872 
1873  Register scratch = r6;
1874 
1875  // Relocate arguments down the stack.
1876  {
1877  Label loop, done_loop;
1878  __ mov(r5, Operand(0));
1879  __ bind(&loop);
1880  __ cmp(r5, r0);
1881  __ b(gt, &done_loop);
1882  __ ldr(scratch, MemOperand(sp, r4, LSL, kPointerSizeLog2));
1883  __ str(scratch, MemOperand(sp, r5, LSL, kPointerSizeLog2));
1884  __ add(r4, r4, Operand(1));
1885  __ add(r5, r5, Operand(1));
1886  __ b(&loop);
1887  __ bind(&done_loop);
1888  }
1889 
1890  // Copy [[BoundArguments]] to the stack (below the arguments).
1891  {
1892  Label loop;
1893  __ ldr(r4, FieldMemOperand(r2, FixedArray::kLengthOffset));
1894  __ SmiUntag(r4);
1895  __ add(r2, r2, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
1896  __ bind(&loop);
1897  __ sub(r4, r4, Operand(1), SetCC);
1898  __ ldr(scratch, MemOperand(r2, r4, LSL, kPointerSizeLog2));
1899  __ str(scratch, MemOperand(sp, r0, LSL, kPointerSizeLog2));
1900  __ add(r0, r0, Operand(1));
1901  __ b(gt, &loop);
1902  }
1903  }
1904  __ bind(&no_bound_arguments);
1905 }
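// Worked example (a sketch): for b = f.bind(t, x, y) invoked as b(a), entry
// has r0 == 1, sp[0] == a, sp[4] == t (receiver already patched to
// [[BoundThis]]). After the two loops above: r0 == 3, sp[0] == a,
// sp[4] == y, sp[8] == x, sp[12] == t, i.e. f is invoked as
// f.call(t, x, y, a).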
1906 
1907 } // namespace
1908 
1909 // static
1910 void Builtins::Generate_CallBoundFunctionImpl(MacroAssembler* masm) {
1911  // ----------- S t a t e -------------
1912  // -- r0 : the number of arguments (not including the receiver)
1913  // -- r1 : the function to call (checked to be a JSBoundFunction)
1914  // -----------------------------------
1915  __ AssertBoundFunction(r1);
1916 
1917  // Patch the receiver to [[BoundThis]].
1918  __ ldr(r3, FieldMemOperand(r1, JSBoundFunction::kBoundThisOffset));
1919  __ str(r3, MemOperand(sp, r0, LSL, kPointerSizeLog2));
1920 
1921  // Push the [[BoundArguments]] onto the stack.
1922  Generate_PushBoundArguments(masm);
1923 
1924  // Call the [[BoundTargetFunction]] via the Call builtin.
1925  __ ldr(r1, FieldMemOperand(r1, JSBoundFunction::kBoundTargetFunctionOffset));
1926  __ Jump(BUILTIN_CODE(masm->isolate(), Call_ReceiverIsAny),
1927  RelocInfo::CODE_TARGET);
1928 }
1929 
1930 // static
1931 void Builtins::Generate_Call(MacroAssembler* masm, ConvertReceiverMode mode) {
1932  // ----------- S t a t e -------------
1933  // -- r0 : the number of arguments (not including the receiver)
1934  // -- r1 : the target to call (can be any Object).
1935  // -----------------------------------
1936 
1937  Label non_callable, non_function, non_smi;
1938  __ JumpIfSmi(r1, &non_callable);
1939  __ bind(&non_smi);
1940  __ CompareObjectType(r1, r4, r5, JS_FUNCTION_TYPE);
1941  __ Jump(masm->isolate()->builtins()->CallFunction(mode),
1942  RelocInfo::CODE_TARGET, eq);
1943  __ cmp(r5, Operand(JS_BOUND_FUNCTION_TYPE));
1944  __ Jump(BUILTIN_CODE(masm->isolate(), CallBoundFunction),
1945  RelocInfo::CODE_TARGET, eq);
1946 
1947  // Check if target has a [[Call]] internal method.
1948  __ ldrb(r4, FieldMemOperand(r4, Map::kBitFieldOffset));
1949  __ tst(r4, Operand(Map::IsCallableBit::kMask));
1950  __ b(eq, &non_callable);
1951 
 1954  // Check if the target is a proxy and, if so, tail call the CallProxy
 1955  // builtin.
1953  __ cmp(r5, Operand(JS_PROXY_TYPE));
1954  __ b(ne, &non_function);
1955  __ Jump(BUILTIN_CODE(masm->isolate(), CallProxy), RelocInfo::CODE_TARGET);
1956 
1957  // 2. Call to something else, which might have a [[Call]] internal method (if
1958  // not we raise an exception).
1959  __ bind(&non_function);
 1962  // Overwrite the original receiver with the (original) target.
1961  __ str(r1, MemOperand(sp, r0, LSL, kPointerSizeLog2));
1962  // Let the "call_as_function_delegate" take care of the rest.
1963  __ LoadNativeContextSlot(Context::CALL_AS_FUNCTION_DELEGATE_INDEX, r1);
1964  __ Jump(masm->isolate()->builtins()->CallFunction(
1965  ConvertReceiverMode::kNotNullOrUndefined),
1966  RelocInfo::CODE_TARGET);
1967 
1968  // 3. Call to something that is not callable.
1969  __ bind(&non_callable);
1970  {
1971  FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
1972  __ Push(r1);
1973  __ CallRuntime(Runtime::kThrowCalledNonCallable);
1974  }
1975 }
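// Dispatch summary (a sketch):
//
//   JSFunction            -> CallFunction(mode)
//   JSBoundFunction       -> CallBoundFunction
//   map not callable      -> Runtime::kThrowCalledNonCallable
//   JSProxy               -> CallProxy
//   other callable object -> CALL_AS_FUNCTION_DELEGATE, with the target
//                            installed as receiver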
1976 
1977 // static
1978 void Builtins::Generate_ConstructFunction(MacroAssembler* masm) {
1979  // ----------- S t a t e -------------
1980  // -- r0 : the number of arguments (not including the receiver)
1981  // -- r1 : the constructor to call (checked to be a JSFunction)
1982  // -- r3 : the new target (checked to be a constructor)
1983  // -----------------------------------
1984  __ AssertConstructor(r1);
1985  __ AssertFunction(r1);
1986 
1987  // Calling convention for function specific ConstructStubs require
1988  // r2 to contain either an AllocationSite or undefined.
1989  __ LoadRoot(r2, RootIndex::kUndefinedValue);
1990 
1991  Label call_generic_stub;
1992 
1993  // Jump to JSBuiltinsConstructStub or JSConstructStubGeneric.
1994  __ ldr(r4, FieldMemOperand(r1, JSFunction::kSharedFunctionInfoOffset));
1995  __ ldr(r4, FieldMemOperand(r4, SharedFunctionInfo::kFlagsOffset));
1996  __ tst(r4, Operand(SharedFunctionInfo::ConstructAsBuiltinBit::kMask));
1997  __ b(eq, &call_generic_stub);
1998 
1999  __ Jump(BUILTIN_CODE(masm->isolate(), JSBuiltinsConstructStub),
2000  RelocInfo::CODE_TARGET);
2001 
2002  __ bind(&call_generic_stub);
2003  __ Jump(BUILTIN_CODE(masm->isolate(), JSConstructStubGeneric),
2004  RelocInfo::CODE_TARGET);
2005 }
2006 
2007 // static
2008 void Builtins::Generate_ConstructBoundFunction(MacroAssembler* masm) {
2009  // ----------- S t a t e -------------
2010  // -- r0 : the number of arguments (not including the receiver)
2011  // -- r1 : the function to call (checked to be a JSBoundFunction)
2012  // -- r3 : the new target (checked to be a constructor)
2013  // -----------------------------------
2014  __ AssertConstructor(r1);
2015  __ AssertBoundFunction(r1);
2016 
2017  // Push the [[BoundArguments]] onto the stack.
2018  Generate_PushBoundArguments(masm);
2019 
2020  // Patch new.target to [[BoundTargetFunction]] if new.target equals target.
2021  __ cmp(r1, r3);
2022  __ ldr(r3, FieldMemOperand(r1, JSBoundFunction::kBoundTargetFunctionOffset),
2023  eq);
2024 
2025  // Construct the [[BoundTargetFunction]] via the Construct builtin.
2026  __ ldr(r1, FieldMemOperand(r1, JSBoundFunction::kBoundTargetFunctionOffset));
2027  __ Jump(BUILTIN_CODE(masm->isolate(), Construct), RelocInfo::CODE_TARGET);
2028 }
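// Example of the new.target patch (a sketch): for bf = f.bind(t) invoked as
// new bf(), new.target initially equals bf and is replaced by f, so the
// construction behaves like new f(). If bf is constructed via a subclass
// (new.target != bf), new.target is left untouched.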
2029 
2030 // static
2031 void Builtins::Generate_Construct(MacroAssembler* masm) {
2032  // ----------- S t a t e -------------
2033  // -- r0 : the number of arguments (not including the receiver)
2034  // -- r1 : the constructor to call (can be any Object)
2035  // -- r3 : the new target (either the same as the constructor or
2036  // the JSFunction on which new was invoked initially)
2037  // -----------------------------------
2038 
2039  // Check if target is a Smi.
2040  Label non_constructor, non_proxy;
2041  __ JumpIfSmi(r1, &non_constructor);
2042 
2043  // Check if target has a [[Construct]] internal method.
2044  __ ldr(r4, FieldMemOperand(r1, HeapObject::kMapOffset));
2045  __ ldrb(r2, FieldMemOperand(r4, Map::kBitFieldOffset));
2046  __ tst(r2, Operand(Map::IsConstructorBit::kMask));
2047  __ b(eq, &non_constructor);
2048 
2049  // Dispatch based on instance type.
2050  __ CompareInstanceType(r4, r5, JS_FUNCTION_TYPE);
2051  __ Jump(BUILTIN_CODE(masm->isolate(), ConstructFunction),
2052  RelocInfo::CODE_TARGET, eq);
2053 
2054  // Only dispatch to bound functions after checking whether they are
2055  // constructors.
2056  __ cmp(r5, Operand(JS_BOUND_FUNCTION_TYPE));
2057  __ Jump(BUILTIN_CODE(masm->isolate(), ConstructBoundFunction),
2058  RelocInfo::CODE_TARGET, eq);
2059 
2060  // Only dispatch to proxies after checking whether they are constructors.
2061  __ cmp(r5, Operand(JS_PROXY_TYPE));
2062  __ b(ne, &non_proxy);
2063  __ Jump(BUILTIN_CODE(masm->isolate(), ConstructProxy),
2064  RelocInfo::CODE_TARGET);
2065 
2066  // Called Construct on an exotic Object with a [[Construct]] internal method.
2067  __ bind(&non_proxy);
2068  {
2069  // Overwrite the original receiver with the (original) target.
2070  __ str(r1, MemOperand(sp, r0, LSL, kPointerSizeLog2));
2071  // Let the "call_as_constructor_delegate" take care of the rest.
2072  __ LoadNativeContextSlot(Context::CALL_AS_CONSTRUCTOR_DELEGATE_INDEX, r1);
2073  __ Jump(masm->isolate()->builtins()->CallFunction(),
2074  RelocInfo::CODE_TARGET);
2075  }
2076 
2077  // Called Construct on an Object that doesn't have a [[Construct]] internal
2078  // method.
2079  __ bind(&non_constructor);
2080  __ Jump(BUILTIN_CODE(masm->isolate(), ConstructedNonConstructable),
2081  RelocInfo::CODE_TARGET);
2082 }
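// Dispatch summary (a sketch):
//
//   Smi or map without IsConstructorBit -> ConstructedNonConstructable
//   JSFunction                          -> ConstructFunction
//   JSBoundFunction                     -> ConstructBoundFunction
//   JSProxy                             -> ConstructProxy
//   other constructor                   -> CALL_AS_CONSTRUCTOR_DELEGATE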
2083 
2084 void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
2085  // ----------- S t a t e -------------
2086  // -- r0 : actual number of arguments
2087  // -- r1 : function (passed through to callee)
2088  // -- r2 : expected number of arguments
2089  // -- r3 : new target (passed through to callee)
2090  // -----------------------------------
2091 
2092  Label invoke, dont_adapt_arguments, stack_overflow;
2093 
2094  Label enough, too_few;
2095  __ cmp(r2, Operand(SharedFunctionInfo::kDontAdaptArgumentsSentinel));
2096  __ b(eq, &dont_adapt_arguments);
2097  __ cmp(r0, r2);
2098  __ b(lt, &too_few);
2099 
2100  Register scratch = r5;
2101 
2102  { // Enough parameters: actual >= expected
2103  __ bind(&enough);
2104  EnterArgumentsAdaptorFrame(masm);
2105  Generate_StackOverflowCheck(masm, r2, scratch, &stack_overflow);
2106 
2107  // Calculate copy start address into r0 and copy end address into r4.
2108  // r0: actual number of arguments as a smi
2109  // r1: function
2110  // r2: expected number of arguments
2111  // r3: new target (passed through to callee)
2112  __ add(r0, fp, Operand::PointerOffsetFromSmiKey(r0));
2113  // adjust for return address and receiver
2114  __ add(r0, r0, Operand(2 * kPointerSize));
2115  __ sub(r4, r0, Operand(r2, LSL, kPointerSizeLog2));
2116 
2117  // Copy the arguments (including the receiver) to the new stack frame.
2118  // r0: copy start address
2119  // r1: function
2120  // r2: expected number of arguments
2121  // r3: new target (passed through to callee)
2122  // r4: copy end address
2123 
2124  Label copy;
2125  __ bind(&copy);
2126  __ ldr(scratch, MemOperand(r0, 0));
2127  __ push(scratch);
2128  __ cmp(r0, r4); // Compare before moving to next argument.
2129  __ sub(r0, r0, Operand(kPointerSize));
2130  __ b(ne, &copy);
2131 
2132  __ b(&invoke);
2133  }
2134 
 2137  { // Too few parameters: actual < expected
2136  __ bind(&too_few);
2137  EnterArgumentsAdaptorFrame(masm);
2138  Generate_StackOverflowCheck(masm, r2, scratch, &stack_overflow);
2139 
2140  // Calculate copy start address into r0 and copy end address is fp.
2141  // r0: actual number of arguments as a smi
2142  // r1: function
2143  // r2: expected number of arguments
2144  // r3: new target (passed through to callee)
2145  __ add(r0, fp, Operand::PointerOffsetFromSmiKey(r0));
2146 
2147  // Copy the arguments (including the receiver) to the new stack frame.
2148  // r0: copy start address
2149  // r1: function
2150  // r2: expected number of arguments
2151  // r3: new target (passed through to callee)
2152  Label copy;
2153  __ bind(&copy);
2154 
2155  // Adjust load for return address and receiver.
2156  __ ldr(scratch, MemOperand(r0, 2 * kPointerSize));
2157  __ push(scratch);
2158 
2159  __ cmp(r0, fp); // Compare before moving to next argument.
2160  __ sub(r0, r0, Operand(kPointerSize));
2161  __ b(ne, &copy);
2162 
2163  // Fill the remaining expected arguments with undefined.
2164  // r1: function
2165  // r2: expected number of arguments
2166  // r3: new target (passed through to callee)
2167  __ LoadRoot(scratch, RootIndex::kUndefinedValue);
2168  __ sub(r4, fp, Operand(r2, LSL, kPointerSizeLog2));
2169  // Adjust for frame.
2170  __ sub(r4, r4,
2171  Operand(ArgumentsAdaptorFrameConstants::kFixedFrameSizeFromFp +
2172  kPointerSize));
2173 
2174  Label fill;
2175  __ bind(&fill);
2176  __ push(scratch);
2177  __ cmp(sp, r4);
2178  __ b(ne, &fill);
2179  }
2180 
2181  // Call the entry point.
2182  __ bind(&invoke);
2183  __ mov(r0, r2);
2184  // r0 : expected number of arguments
2185  // r1 : function (passed through to callee)
2186  // r3 : new target (passed through to callee)
2187  static_assert(kJavaScriptCallCodeStartRegister == r2, "ABI mismatch");
2188  __ ldr(r2, FieldMemOperand(r1, JSFunction::kCodeOffset));
2189  __ add(r2, r2, Operand(Code::kHeaderSize - kHeapObjectTag));
2190  __ Call(r2);
2191 
2192  // Store offset of return address for deoptimizer.
2193  masm->isolate()->heap()->SetArgumentsAdaptorDeoptPCOffset(masm->pc_offset());
2194 
2195  // Exit frame and return.
2196  LeaveArgumentsAdaptorFrame(masm);
2197  __ Jump(lr);
2198 
2199  // -------------------------------------------
 2202  // Don't adapt arguments.
2201  // -------------------------------------------
2202  __ bind(&dont_adapt_arguments);
2203  static_assert(kJavaScriptCallCodeStartRegister == r2, "ABI mismatch");
2204  __ ldr(r2, FieldMemOperand(r1, JSFunction::kCodeOffset));
2205  __ add(r2, r2, Operand(Code::kHeaderSize - kHeapObjectTag));
2206  __ Jump(r2);
2207 
2208  __ bind(&stack_overflow);
2209  {
2210  FrameScope frame(masm, StackFrame::MANUAL);
2211  __ CallRuntime(Runtime::kThrowStackOverflow);
2212  __ bkpt(0);
2213  }
2214 }
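// Worked example (a sketch): calling f(a) where f declares (x, y, z) arrives
// with actual r0 == 1 and expected r2 == 3, so the "too few" path builds
//
//   [ receiver, a, undefined, undefined ]
//
// in the adaptor frame before invoking f; LeaveArgumentsAdaptorFrame tears
// the frame down again on return.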
2215 
2216 void Builtins::Generate_WasmCompileLazy(MacroAssembler* masm) {
2217  // The function index was put in a register by the jump table trampoline.
2218  // Convert to Smi for the runtime call.
2219  __ SmiTag(kWasmCompileLazyFuncIndexRegister,
2220  kWasmCompileLazyFuncIndexRegister);
2221  {
2222  HardAbortScope hard_abort(masm); // Avoid calls to Abort.
2223  FrameAndConstantPoolScope scope(masm, StackFrame::WASM_COMPILE_LAZY);
2224 
2225  // Save all parameter registers (see wasm-linkage.cc). They might be
2226  // overwritten in the runtime call below. We don't have any callee-saved
2227  // registers in wasm, so no need to store anything else.
2228  constexpr RegList gp_regs = Register::ListOf<r0, r1, r2, r3>();
2229  constexpr DwVfpRegister lowest_fp_reg = d0;
2230  constexpr DwVfpRegister highest_fp_reg = d7;
2231 
2232  __ stm(db_w, sp, gp_regs);
2233  __ vstm(db_w, sp, lowest_fp_reg, highest_fp_reg);
2234 
2235  // Pass instance and function index as explicit arguments to the runtime
2236  // function.
2237  __ push(kWasmInstanceRegister);
2238  __ push(kWasmCompileLazyFuncIndexRegister);
2239  // Load the correct CEntry builtin from the instance object.
2240  __ ldr(r2, FieldMemOperand(kWasmInstanceRegister,
2241  WasmInstanceObject::kCEntryStubOffset));
2242  // Initialize the JavaScript context with 0. CEntry will use it to
2243  // set the current context on the isolate.
2244  __ Move(cp, Smi::zero());
2245  __ CallRuntimeWithCEntry(Runtime::kWasmCompileLazy, r2);
2246  // The entrypoint address is the return value.
2247  __ mov(r8, kReturnRegister0);
2248 
2249  // Restore registers.
2250  __ vldm(ia_w, sp, lowest_fp_reg, highest_fp_reg);
2251  __ ldm(ia_w, sp, gp_regs);
2252  }
2253  // Finally, jump to the entrypoint.
2254  __ Jump(r8);
2255 }
2256 
2257 void Builtins::Generate_CEntry(MacroAssembler* masm, int result_size,
2258  SaveFPRegsMode save_doubles, ArgvMode argv_mode,
2259  bool builtin_exit_frame) {
2260  // Called from JavaScript; parameters are on stack as if calling JS function.
2261  // r0: number of arguments including receiver
2262  // r1: pointer to builtin function
2263  // fp: frame pointer (restored after C call)
2264  // sp: stack pointer (restored as callee's sp after C call)
2265  // cp: current context (C callee-saved)
2266  //
2267  // If argv_mode == kArgvInRegister:
2268  // r2: pointer to the first argument
2269 
2270  __ mov(r5, Operand(r1));
2271 
2272  if (argv_mode == kArgvInRegister) {
2273  // Move argv into the correct register.
2274  __ mov(r1, Operand(r2));
2275  } else {
2276  // Compute the argv pointer in a callee-saved register.
2277  __ add(r1, sp, Operand(r0, LSL, kPointerSizeLog2));
2278  __ sub(r1, r1, Operand(kPointerSize));
2279  }
2280 
2281  // Enter the exit frame that transitions from JavaScript to C++.
2282  FrameScope scope(masm, StackFrame::MANUAL);
2283  __ EnterExitFrame(
2284  save_doubles == kSaveFPRegs, 0,
2285  builtin_exit_frame ? StackFrame::BUILTIN_EXIT : StackFrame::EXIT);
2286 
2287  // Store a copy of argc in callee-saved registers for later.
2288  __ mov(r4, Operand(r0));
2289 
2290 // r0, r4: number of arguments including receiver (C callee-saved)
2291 // r1: pointer to the first argument (C callee-saved)
2292 // r5: pointer to builtin function (C callee-saved)
2293 
2294 #if V8_HOST_ARCH_ARM
2295  int frame_alignment = MacroAssembler::ActivationFrameAlignment();
2296  int frame_alignment_mask = frame_alignment - 1;
2297  if (FLAG_debug_code) {
2298  if (frame_alignment > kPointerSize) {
2299  Label alignment_as_expected;
2300  DCHECK(base::bits::IsPowerOfTwo(frame_alignment));
2301  __ tst(sp, Operand(frame_alignment_mask));
2302  __ b(eq, &alignment_as_expected);
 2305  // Don't use Check here, as it will call Runtime_Abort, re-entering this
 2306  // code.
2304  __ stop("Unexpected alignment");
2305  __ bind(&alignment_as_expected);
2306  }
2307  }
2308 #endif
2309 
2310  // Call C built-in.
2311  // r0 = argc, r1 = argv, r2 = isolate
2312  __ Move(r2, ExternalReference::isolate_address(masm->isolate()));
2313 
2314  // To let the GC traverse the return address of the exit frames, we need to
2315  // know where the return address is. CEntry is unmovable, so
2316  // we can store the address on the stack to be able to find it again and
2317  // we never have to restore it, because it will not change.
 2320  // Compute the return address in lr to return to after the jump below. The
 2321  // pc already reads as '+ 8' from the current instruction, but the return
 2322  // point is three instructions later, so add another 4 to pc to get it.
2321  {
2322  // Prevent literal pool emission before return address.
2323  Assembler::BlockConstPoolScope block_const_pool(masm);
2324  __ add(lr, pc, Operand(4));
2325  __ str(lr, MemOperand(sp));
2326  __ Call(r5);
2327  }
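  // Worked example (an assumption): if the add above sits at address A,
  // reading pc yields A + 8 (ARM pipeline), so lr = A + 12, which is exactly
  // the instruction after the Call (add at A, str at A + 4, blx at A + 8).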
2328 
2329  // Result returned in r0 or r1:r0 - do not destroy these registers!
2330 
2331  // Check result for exception sentinel.
2332  Label exception_returned;
2333  __ CompareRoot(r0, RootIndex::kException);
2334  __ b(eq, &exception_returned);
2335 
2336  // Check that there is no pending exception, otherwise we
2337  // should have returned the exception sentinel.
2338  if (FLAG_debug_code) {
2339  Label okay;
2340  ExternalReference pending_exception_address = ExternalReference::Create(
2341  IsolateAddressId::kPendingExceptionAddress, masm->isolate());
2342  __ Move(r3, pending_exception_address);
2343  __ ldr(r3, MemOperand(r3));
2344  __ CompareRoot(r3, RootIndex::kTheHoleValue);
 2347  // Cannot use Check here, as it attempts to generate a call into the
 2348  // runtime.
2346  __ b(eq, &okay);
2347  __ stop("Unexpected pending exception");
2348  __ bind(&okay);
2349  }
2350 
2351  // Exit C frame and return.
2352  // r0:r1: result
2353  // sp: stack pointer
2354  // fp: frame pointer
2355  Register argc = argv_mode == kArgvInRegister
2356  // We don't want to pop arguments so set argc to no_reg.
2357  ? no_reg
2358  // Callee-saved register r4 still holds argc.
2359  : r4;
2360  __ LeaveExitFrame(save_doubles == kSaveFPRegs, argc);
2361  __ mov(pc, lr);
2362 
2363  // Handling of exception.
2364  __ bind(&exception_returned);
2365 
2366  ExternalReference pending_handler_context_address = ExternalReference::Create(
2367  IsolateAddressId::kPendingHandlerContextAddress, masm->isolate());
2368  ExternalReference pending_handler_entrypoint_address =
2369  ExternalReference::Create(
2370  IsolateAddressId::kPendingHandlerEntrypointAddress, masm->isolate());
2371  ExternalReference pending_handler_fp_address = ExternalReference::Create(
2372  IsolateAddressId::kPendingHandlerFPAddress, masm->isolate());
2373  ExternalReference pending_handler_sp_address = ExternalReference::Create(
2374  IsolateAddressId::kPendingHandlerSPAddress, masm->isolate());
2375 
2376  // Ask the runtime for help to determine the handler. This will set r0 to
2377  // contain the current pending exception, don't clobber it.
2378  ExternalReference find_handler =
2379  ExternalReference::Create(Runtime::kUnwindAndFindExceptionHandler);
2380  {
2381  FrameScope scope(masm, StackFrame::MANUAL);
2382  __ PrepareCallCFunction(3, 0);
2383  __ mov(r0, Operand(0));
2384  __ mov(r1, Operand(0));
2385  __ Move(r2, ExternalReference::isolate_address(masm->isolate()));
2386  __ CallCFunction(find_handler, 3);
2387  }
2388 
2389  // Retrieve the handler context, SP and FP.
2390  __ Move(cp, pending_handler_context_address);
2391  __ ldr(cp, MemOperand(cp));
2392  __ Move(sp, pending_handler_sp_address);
2393  __ ldr(sp, MemOperand(sp));
2394  __ Move(fp, pending_handler_fp_address);
2395  __ ldr(fp, MemOperand(fp));
2396 
2397  // If the handler is a JS frame, restore the context to the frame. Note that
2398  // the context will be set to (cp == 0) for non-JS frames.
2399  __ cmp(cp, Operand(0));
2400  __ str(cp, MemOperand(fp, StandardFrameConstants::kContextOffset), ne);
2401 
2402  // Reset the masking register. This is done independent of the underlying
2403  // feature flag {FLAG_untrusted_code_mitigations} to make the snapshot work
2404  // with both configurations. It is safe to always do this, because the
2405  // underlying register is caller-saved and can be arbitrarily clobbered.
2406  __ ResetSpeculationPoisonRegister();
2407 
2408  // Compute the handler entry address and jump to it.
2409  ConstantPoolUnavailableScope constant_pool_unavailable(masm);
2410  __ Move(r1, pending_handler_entrypoint_address);
2411  __ ldr(r1, MemOperand(r1));
2412  __ Jump(r1);
2413 }
2414 
2415 void Builtins::Generate_DoubleToI(MacroAssembler* masm) {
2416  Label negate, done;
2417 
2418  HardAbortScope hard_abort(masm); // Avoid calls to Abort.
2419  UseScratchRegisterScope temps(masm);
2420  Register result_reg = r7;
2421  Register double_low = GetRegisterThatIsNotOneOf(result_reg);
2422  Register double_high = GetRegisterThatIsNotOneOf(result_reg, double_low);
2423  LowDwVfpRegister double_scratch = temps.AcquireLowD();
2424 
2425  // Save the old values from these temporary registers on the stack.
2426  __ Push(result_reg, double_high, double_low);
2427 
2428  // Account for saved regs.
2429  const int kArgumentOffset = 3 * kPointerSize;
2430 
2431  MemOperand input_operand(sp, kArgumentOffset);
2432  MemOperand result_operand = input_operand;
2433 
2434  // Load double input.
2435  __ vldr(double_scratch, input_operand);
2436  __ vmov(double_low, double_high, double_scratch);
2437  // Try to convert with a FPU convert instruction. This handles all
2438  // non-saturating cases.
2439  __ TryInlineTruncateDoubleToI(result_reg, double_scratch, &done);
2440 
2441  Register scratch = temps.Acquire();
2442  __ Ubfx(scratch, double_high, HeapNumber::kExponentShift,
2443  HeapNumber::kExponentBits);
2444  // Load scratch with exponent - 1. This is faster than loading
2445  // with exponent because Bias + 1 = 1024 which is an *ARM* immediate value.
2446  STATIC_ASSERT(HeapNumber::kExponentBias + 1 == 1024);
2447  __ sub(scratch, scratch, Operand(HeapNumber::kExponentBias + 1));
 2450  // If the exponent is greater than or equal to 84, the 32 least significant
 2451  // bits of the integer value are zeros (84 = 52 mantissa bits + 32 uncoded
 2452  // zero bits), so the result is 0.
 2453  // Compare the exponent with 84 (compare exponent - 1 with 83). If it is
 2454  // greater than or equal, the conversion is out of range, so return zero.
2453  __ cmp(scratch, Operand(83));
2454  __ mov(result_reg, Operand::Zero(), LeaveCC, ge);
2455  __ b(ge, &done);
2456 
2457  // If we reach this code, 30 <= exponent <= 83.
2458  // `TryInlineTruncateDoubleToI` above will have truncated any double with an
2459  // exponent lower than 30.
2460  if (masm->emit_debug_code()) {
2461  // Scratch is exponent - 1.
2462  __ cmp(scratch, Operand(30 - 1));
2463  __ Check(ge, AbortReason::kUnexpectedValue);
2464  }
2465 
2466  // We don't have to handle cases where 0 <= exponent <= 20 for which we would
2467  // need to shift right the high part of the mantissa.
2468  // Scratch contains exponent - 1.
2469  // Load scratch with 52 - exponent (load with 51 - (exponent - 1)).
2470  __ rsb(scratch, scratch, Operand(51), SetCC);
2471 
2472  // 52 <= exponent <= 83, shift only double_low.
2473  // On entry, scratch contains: 52 - exponent.
2474  __ rsb(scratch, scratch, Operand::Zero(), LeaveCC, ls);
2475  __ mov(result_reg, Operand(double_low, LSL, scratch), LeaveCC, ls);
2476  __ b(ls, &negate);
2477 
2478  // 21 <= exponent <= 51, shift double_low and double_high
2479  // to generate the result.
2480  __ mov(double_low, Operand(double_low, LSR, scratch));
2481  // Scratch contains: 52 - exponent.
 2484  // We need: exponent - 20.
2483  // So we use: 32 - scratch = 32 - 52 + exponent = exponent - 20.
2484  __ rsb(scratch, scratch, Operand(32));
2485  __ Ubfx(result_reg, double_high, 0, HeapNumber::kMantissaBitsInTopWord);
2486  // Set the implicit 1 before the mantissa part in double_high.
2487  __ orr(result_reg, result_reg,
2488  Operand(1 << HeapNumber::kMantissaBitsInTopWord));
2489  __ orr(result_reg, double_low, Operand(result_reg, LSL, scratch));
2490 
2491  __ bind(&negate);
 2494  // If the input was positive, double_high ASR 31 equals 0 and
 2495  // double_high LSR 31 equals 0.
 2496  // New result = (result eor 0) + 0 = result.
 2497  // If the input was negative, we have to negate the result:
 2498  // double_high ASR 31 equals 0xFFFFFFFF and double_high LSR 31 equals 1.
 2499  // New result = (result eor 0xFFFFFFFF) + 1 = 0 - result.
2498  __ eor(result_reg, result_reg, Operand(double_high, ASR, 31));
2499  __ add(result_reg, result_reg, Operand(double_high, LSR, 31));
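  // Worked example (a sketch): for input -3.0, result_reg holds 3 here and
  // the sign bit of double_high is set, so ASR 31 gives 0xFFFFFFFF and
  // LSR 31 gives 1: (3 eor 0xFFFFFFFF) + 1 == -3, i.e. two's-complement
  // negation.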
2500 
2501  __ bind(&done);
2502  __ str(result_reg, result_operand);
2503 
2504  // Restore registers corrupted in this routine and return.
2505  __ Pop(result_reg, double_high, double_low);
2506  __ Ret();
2507 }
2508 
2509 void Builtins::Generate_MathPowInternal(MacroAssembler* masm) {
2510  const LowDwVfpRegister double_base = d0;
2511  const LowDwVfpRegister double_exponent = d1;
2512  const LowDwVfpRegister double_result = d2;
2513  const LowDwVfpRegister double_scratch = d3;
2514  const SwVfpRegister single_scratch = s6;
 2517  // Avoid using registers r0-r3 as they may be needed when calling into C if
 2518  // the ABI is softfloat.
2517  const Register integer_exponent = r4;
2518  const Register scratch = r5;
2519 
2520  Label call_runtime, done, int_exponent;
2521 
2522  // Detect integer exponents stored as double.
2523  __ TryDoubleToInt32Exact(integer_exponent, double_exponent, double_scratch);
2524  __ b(eq, &int_exponent);
2525 
2526  __ push(lr);
2527  {
2528  AllowExternalCallThatCantCauseGC scope(masm);
2529  __ PrepareCallCFunction(0, 2);
2530  __ MovToFloatParameters(double_base, double_exponent);
2531  __ CallCFunction(ExternalReference::power_double_double_function(), 0, 2);
2532  }
2533  __ pop(lr);
2534  __ MovFromFloatResult(double_result);
2535  __ b(&done);
2536 
2537  // Calculate power with integer exponent.
2538  __ bind(&int_exponent);
2539 
2540  __ vmov(double_scratch, double_base); // Back up base.
2541  __ vmov(double_result, Double(1.0), scratch);
2542 
2543  // Get absolute value of exponent.
2544  __ cmp(integer_exponent, Operand::Zero());
2545  __ mov(scratch, integer_exponent);
2546  __ rsb(scratch, integer_exponent, Operand::Zero(), LeaveCC, mi);
2547 
2548  Label while_true;
2549  __ bind(&while_true);
2550  __ mov(scratch, Operand(scratch, LSR, 1), SetCC);
2551  __ vmul(double_result, double_result, double_scratch, cs);
2552  __ vmul(double_scratch, double_scratch, double_scratch, ne);
2553  __ b(ne, &while_true);
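  // Worked example (a sketch): for |exponent| == 5 (binary 101) the loop
  // runs three times; result is multiplied by the running base exactly when
  // a 1 bit is shifted out (bits 0 and 2), yielding base^1 * base^4 == base^5.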
2554 
2555  __ cmp(integer_exponent, Operand::Zero());
2556  __ b(ge, &done);
2557  __ vmov(double_scratch, Double(1.0), scratch);
2558  __ vdiv(double_result, double_scratch, double_result);
2559  // Test whether result is zero. Bail out to check for subnormal result.
2560  // Due to subnormals, x^-y == (1/x)^y does not hold in all cases.
2561  __ VFPCompareAndSetFlags(double_result, 0.0);
2562  __ b(ne, &done);
 2565  // double_exponent may not contain the exponent value if the input was a
 2566  // smi. We set it to the exponent value before bailing out.
2565  __ vmov(single_scratch, integer_exponent);
2566  __ vcvt_f64_s32(double_exponent, single_scratch);
2567 
2568  // Returning or bailing out.
2569  __ push(lr);
2570  {
2571  AllowExternalCallThatCantCauseGC scope(masm);
2572  __ PrepareCallCFunction(0, 2);
2573  __ MovToFloatParameters(double_base, double_exponent);
2574  __ CallCFunction(ExternalReference::power_double_double_function(), 0, 2);
2575  }
2576  __ pop(lr);
2577  __ MovFromFloatResult(double_result);
2578 
2579  __ bind(&done);
2580  __ Ret();
2581 }
2582 
2583 namespace {
2584 
2585 void GenerateInternalArrayConstructorCase(MacroAssembler* masm,
2586  ElementsKind kind) {
2587  // Load undefined into the allocation site parameter as required by
2588  // ArrayNArgumentsConstructor.
2589  __ LoadRoot(kJavaScriptCallExtraArg1Register, RootIndex::kUndefinedValue);
2590 
2591  __ cmp(r0, Operand(1));
2592 
2593  __ Jump(CodeFactory::InternalArrayNoArgumentConstructor(masm->isolate(), kind)
2594  .code(),
2595  RelocInfo::CODE_TARGET, lo);
2596 
2597  Handle<Code> code = BUILTIN_CODE(masm->isolate(), ArrayNArgumentsConstructor);
2598  __ Jump(code, RelocInfo::CODE_TARGET, hi);
2599 
2600  if (IsFastPackedElementsKind(kind)) {
 2603  // We might need to create a holey array:
 2604  // look at the first argument.
2603  __ ldr(r3, MemOperand(sp, 0));
2604  __ cmp(r3, Operand::Zero());
2605 
2606  __ Jump(CodeFactory::InternalArraySingleArgumentConstructor(
2607  masm->isolate(), GetHoleyElementsKind(kind))
2608  .code(),
2609  RelocInfo::CODE_TARGET, ne);
2610  }
2611 
2612  __ Jump(
2613  CodeFactory::InternalArraySingleArgumentConstructor(masm->isolate(), kind)
2614  .code(),
2615  RelocInfo::CODE_TARGET);
2616 }
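// Dispatch summary (a sketch):
//
//   argc == 0                 -> InternalArrayNoArgumentConstructor
//   argc >  1                 -> ArrayNArgumentsConstructor
//   argc == 1, first arg != 0 -> single-argument constructor, holey variant
//                                (only when the requested kind is packed)
//   argc == 1, otherwise      -> single-argument constructor, requested kind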
2617 
2618 } // namespace
2619 
2620 void Builtins::Generate_InternalArrayConstructorImpl(MacroAssembler* masm) {
2621  // ----------- S t a t e -------------
2622  // -- r0 : argc
2623  // -- r1 : constructor
2624  // -- sp[0] : return address
2625  // -- sp[4] : last argument
2626  // -----------------------------------
2627 
2628  if (FLAG_debug_code) {
2629  // The array construct code is only set for the global and natives
2630  // builtin Array functions which always have maps.
2631 
2632  // Initial map for the builtin Array function should be a map.
2633  __ ldr(r3, FieldMemOperand(r1, JSFunction::kPrototypeOrInitialMapOffset));
 2636  // The tag test below catches both a nullptr and a Smi.
2635  __ tst(r3, Operand(kSmiTagMask));
2636  __ Assert(ne, AbortReason::kUnexpectedInitialMapForArrayFunction);
2637  __ CompareObjectType(r3, r3, r4, MAP_TYPE);
2638  __ Assert(eq, AbortReason::kUnexpectedInitialMapForArrayFunction);
2639  }
2640 
2641  // Figure out the right elements kind
2642  __ ldr(r3, FieldMemOperand(r1, JSFunction::kPrototypeOrInitialMapOffset));
 2645  // Load the map's "bit field 2" into r3. We only need the first byte,
 2646  // but the following bit field extraction takes care of that anyway.
2645  __ ldr(r3, FieldMemOperand(r3, Map::kBitField2Offset));
2646  // Retrieve elements_kind from bit field 2.
2647  __ DecodeField<Map::ElementsKindBits>(r3);
2648 
2649  if (FLAG_debug_code) {
2650  Label done;
2651  __ cmp(r3, Operand(PACKED_ELEMENTS));
2652  __ b(eq, &done);
2653  __ cmp(r3, Operand(HOLEY_ELEMENTS));
2654  __ Assert(
2655  eq,
2656  AbortReason::kInvalidElementsKindForInternalArrayOrInternalPackedArray);
2657  __ bind(&done);
2658  }
2659 
2660  Label fast_elements_case;
2661  __ cmp(r3, Operand(PACKED_ELEMENTS));
2662  __ b(eq, &fast_elements_case);
2663  GenerateInternalArrayConstructorCase(masm, HOLEY_ELEMENTS);
2664 
2665  __ bind(&fast_elements_case);
2666  GenerateInternalArrayConstructorCase(masm, PACKED_ELEMENTS);
2667 }
2668 
2669 #undef __
2670 
2671 } // namespace internal
2672 } // namespace v8
2673 
2674 #endif // V8_TARGET_ARCH_ARM