// NOTE(review): this span fuses the ARM64 target guard, the #include block,
// the ACCESS_MASM shorthand macro and the start of Generate_Adaptor onto
// extraction-damaged lines (stray original line numbers embedded, some lines
// elided). Code is preserved verbatim; only comments are added.
//
// Generate_Adaptor: trampoline from JS into a C++ builtin. Loads the C++
// entry address into kJavaScriptCallExtraArg1Register, then tail-jumps to
// the adaptor builtin that builds the matching exit frame.
5 #if V8_TARGET_ARCH_ARM64 7 #include "src/arm64/macro-assembler-arm64-inl.h" 8 #include "src/code-factory.h" 9 #include "src/code-stubs.h" 10 #include "src/counters.h" 11 #include "src/debug/debug.h" 12 #include "src/deoptimizer.h" 13 #include "src/frame-constants.h" 14 #include "src/frames.h" 15 #include "src/objects-inl.h" 16 #include "src/objects/js-generator.h" 17 #include "src/objects/smi.h" 18 #include "src/register-configuration.h" 19 #include "src/runtime/runtime.h" 20 #include "src/wasm/wasm-objects.h" 25 #define __ ACCESS_MASM(masm) 27 void Builtins::Generate_Adaptor(MacroAssembler* masm, Address address,
28 ExitFrameType exit_frame_type) {
// Pass the C++ builtin's address as an extra argument in the designated
// JS-call extra-arg register.
29 __ Mov(kJavaScriptCallExtraArg1Register, ExternalReference::Create(address));
30 if (exit_frame_type == BUILTIN_EXIT) {
// BUILTIN_EXIT frames record the builtin for stack traces.
31 __ Jump(BUILTIN_CODE(masm->isolate(), AdaptorWithBuiltinExitFrame),
32 RelocInfo::CODE_TARGET);
// NOTE(review): the `} else {` between the two branches is elided in this
// view; the DCHECK below belongs to the else-branch (plain EXIT frame).
34 DCHECK(exit_frame_type == EXIT);
35 __ Jump(BUILTIN_CODE(masm->isolate(), AdaptorWithExitFrame),
36 RelocInfo::CODE_TARGET);
// Entry point for the InternalArray constructor. In debug builds, verifies
// that the constructor (x1) carries a real initial map (not a smi, and of
// MAP_TYPE), then tail-calls the shared InternalArrayConstructorImpl builtin.
// NOTE(review): several original lines are elided in this view; code below is
// preserved verbatim.
40 void Builtins::Generate_InternalArrayConstructor(MacroAssembler* masm) {
46 ASM_LOCATION(
"Builtins::Generate_InternalArrayConstructor");
47 Label generic_array_code;
49 if (FLAG_debug_code) {
// Load the constructor's initial map and assert it is a heap object...
51 __ Ldr(x10, FieldMemOperand(x1, JSFunction::kPrototypeOrInitialMapOffset));
52 __ Tst(x10, kSmiTagMask);
53 __ Assert(ne, AbortReason::kUnexpectedInitialMapForInternalArrayFunction);
// ...and that it is actually a Map.
54 __ CompareObjectType(x10, x11, x12, MAP_TYPE);
55 __ Assert(eq, AbortReason::kUnexpectedInitialMapForInternalArrayFunction);
// Tail-call the platform-independent implementation.
60 __ Jump(BUILTIN_CODE(masm->isolate(), InternalArrayConstructorImpl),
61 RelocInfo::CODE_TARGET);
// Calls a runtime function that returns a Code object, then arranges to
// tail-call that code. Preserves the JS calling convention registers
// (x0 = argc, x1 = target, x3 = new target) across the runtime call by
// pushing/popping them around it (padreg keeps sp 16-byte aligned).
64 static void GenerateTailCallToReturnedCode(MacroAssembler* masm,
65 Runtime::FunctionId function_id) {
72 FrameScope scope(masm, StackFrame::INTERNAL);
// Save JS-call registers; padreg maintains stack alignment.
76 __ Push(x0, x1, x3, padreg);
79 __ CallRuntime(function_id, 1);
// Restore in reverse order. The runtime's result (returned Code) is
// expected in x2 here — NOTE(review): the move into x2 is on a line elided
// from this view; confirm against the full file.
83 __ Pop(padreg, x3, x1, x0);
87 static_assert(kJavaScriptCallCodeStartRegister == x2,
"ABI mismatch");
// Convert the Code object pointer to its instruction start.
88 __ Add(x2, x2, Code::kHeaderSize - kHeapObjectTag);
// Construct stub used for builtin constructors: sets up a CONSTRUCT frame,
// copies the incoming arguments below the frame, invokes the constructor,
// then tears the frame down and drops the arguments.
// NOTE(review): many original lines are elided in this view (register setup
// for argc/dst/src/count, the fill loop, epilogue); code preserved verbatim.
94 void Generate_JSBuiltinsConstructStubHelper(MacroAssembler* masm) {
95 Label post_instantiation_deopt_entry;
106 ASM_LOCATION(
"Builtins::Generate_JSConstructStubHelper");
110 FrameScope scope(masm, StackFrame::CONSTRUCT);
111 Label already_aligned;
114 if (__ emit_debug_code()) {
118 __ Check(eq, AbortReason::kUnexpectedValue);
// Preserve the smi-tagged argument count in the frame.
122 __ SmiTag(x11, argc);
123 __ Push(x11, padreg);
// Claim enough stack for the arguments plus receiver, rounded up to an
// even slot count so sp stays 16-byte aligned (Bic clears the low bit).
126 Register slot_count = x2;
127 Register slot_count_without_rounding = x12;
128 __ Add(slot_count_without_rounding, argc, 2);
129 __ Bic(slot_count, slot_count_without_rounding, 1);
130 __ Claim(slot_count);
// Store the hole as the receiver slot; pad the extra slot when the
// unrounded count was odd.
133 __ LoadRoot(x10, RootIndex::kTheHoleValue);
137 __ SlotAddress(x2, argc);
140 __ Str(x10, MemOperand(x2));
141 __ Tbnz(slot_count_without_rounding, 0, &already_aligned);
144 __ Str(padreg, MemOperand(x2, 1 * kPointerSize));
145 __ Bind(&already_aligned);
// Copy the caller's arguments into the claimed area.
// NOTE(review): dst/src/count register declarations are elided in this view.
153 __ SlotAddress(dst, 0);
154 __ Add(src, fp, StandardFrameConstants::kCallerSPOffset);
155 __ CopyDoubleWords(dst, src, count);
// Invoke the constructor (x1 = function, x3 = new target).
182 ParameterCount actual(argc);
183 __ InvokeFunction(x1, x3, actual, CALL_FUNCTION);
// Restore the context and the (smi-tagged) argument count from the frame,
// then drop arguments plus receiver.
186 __ Ldr(cp, MemOperand(fp, ConstructFrameConstants::kContextOffset));
190 __ SmiUntag(x1, MemOperand(fp, ConstructFrameConstants::kLengthOffset));
195 __ DropArguments(x1, TurboAssembler::kCountExcludesReceiver);
// Branches to |stack_overflow| if there is not enough room on the stack for
// |num_args| pointer-sized slots. On Windows additionally touches each page
// of the to-be-claimed region so guard pages are committed in order
// (chkstk-style probing).
199 void Generate_StackOverflowCheck(MacroAssembler* masm, Register num_args,
200 Label* stack_overflow) {
201 UseScratchRegisterScope temps(masm);
202 Register scratch = temps.AcquireX();
207 Label enough_stack_space;
208 __ LoadRoot(scratch, RootIndex::kRealStackLimit);
// scratch = headroom in bytes between sp and the real stack limit.
211 __ Sub(scratch, sp, scratch);
213 __ Cmp(scratch, Operand(num_args, LSL, kPointerSizeLog2));
214 __ B(le, stack_overflow);
// Windows-only page probing; loop body/bind for `chkstk` is partially
// elided in this view.
216 #if defined(V8_OS_WIN) 218 const int kPageSize = 4096;
219 Label chkstk, chkstk_done;
220 Register probe = temps.AcquireX();
// scratch = lowest address the claim will reach.
222 __ Sub(scratch, sp, Operand(num_args, LSL, kPointerSizeLog2));
227 __ Sub(probe, probe, kPageSize);
228 __ Cmp(probe, scratch);
229 __ B(lo, &chkstk_done);
// Touch the page (load into xzr discards the value).
230 __ Ldrb(xzr, MemOperand(probe));
233 __ Bind(&chkstk_done);
// Generic construct stub: allocates the implicit receiver via FastNewObject
// (unless the constructor is a derived class constructor, which gets the
// hole instead), invokes the constructor, and validates the result — a
// non-undefined JSReceiver return value replaces the implicit receiver.
// Also records the create/invoke deopt PC offsets used by the deoptimizer.
// NOTE(review): numerous original lines are elided in this view; code below
// is preserved verbatim. `¬_` sequences are extraction damage for `&not_`.
240 void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) {
250 ASM_LOCATION(
"Builtins::Generate_JSConstructStubGeneric");
254 FrameScope scope(masm, StackFrame::CONSTRUCT);
255 Label post_instantiation_deopt_entry, not_create_implicit_receiver;
257 if (__ emit_debug_code()) {
261 __ Check(eq, AbortReason::kUnexpectedValue);
// Preserve argc, constructor, and new target in the frame.
266 __ Push(x0, x1, padreg, x3);
// Derived constructors get the hole instead of an implicit receiver.
276 __ Ldr(x4, FieldMemOperand(x1, JSFunction::kSharedFunctionInfoOffset));
277 __ Ldr(w4, FieldMemOperand(x4, SharedFunctionInfo::kFlagsOffset));
278 __ TestAndBranchIfAnySet(w4,
279 SharedFunctionInfo::IsDerivedConstructorBit::kMask,
280 ¬_create_implicit_receiver);
// Allocate the implicit receiver.
283 __ IncrementCounter(masm->isolate()->counters()->constructed_objects(), 1,
285 __ Call(BUILTIN_CODE(masm->isolate(), FastNewObject),
286 RelocInfo::CODE_TARGET);
287 __ B(&post_instantiation_deopt_entry);
290 __ Bind(¬_create_implicit_receiver);
291 __ LoadRoot(x0, RootIndex::kTheHoleValue);
// Record the deopt PC for re-entry after receiver creation.
302 masm->isolate()->heap()->SetConstructStubCreateDeoptPCOffset(
305 __ Bind(&post_instantiation_deopt_entry);
// Restore new target; reload constructor and (smi) argc from the frame.
308 __ Peek(x3, 0 * kPointerSize);
311 __ Ldr(x1, MemOperand(fp, ConstructFrameConstants::kConstructorOffset));
312 __ SmiUntag(x12, MemOperand(fp, ConstructFrameConstants::kLengthOffset));
347 Label enough_stack_space, stack_overflow;
348 Generate_StackOverflowCheck(masm, x10, &stack_overflow);
349 __ B(&enough_stack_space);
351 __ Bind(&stack_overflow);
// Restore the context from the frame before throwing.
353 __ Ldr(cp, MemOperand(fp, ConstructFrameConstants::kContextOffset));
354 __ CallRuntime(Runtime::kThrowStackOverflow);
357 __ Bind(&enough_stack_space);
// Copy the caller's arguments below the frame.
// NOTE(review): dst/src/count register declarations are elided in this view.
366 __ SlotAddress(dst, 0);
367 __ Add(src, fp, StandardFrameConstants::kCallerSPOffset);
368 __ CopyDoubleWords(dst, src, count);
// Invoke the constructor.
373 ParameterCount actual(x0);
374 __ InvokeFunction(x1, x3, actual, CALL_FUNCTION);
// Record the deopt PC for re-entry after the invoke.
385 masm->isolate()->heap()->SetConstructStubInvokeDeoptPCOffset(
389 __ Ldr(cp, MemOperand(fp, ConstructFrameConstants::kContextOffset));
// Result validation: undefined or a smi means "use the implicit receiver";
// any JSReceiver is returned as-is; anything else throws (in strict
// derived-constructor semantics, via do_throw — partially elided here).
394 Label use_receiver, do_throw, leave_frame;
397 __ CompareRoot(x0, RootIndex::kUndefinedValue);
398 __ B(eq, &use_receiver);
404 __ JumpIfSmi(x0, &use_receiver);
408 STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
409 __ JumpIfObjectType(x0, x4, x5, FIRST_JS_RECEIVER_TYPE, &leave_frame, ge);
413 __ CallRuntime(Runtime::kThrowConstructorReturnedNonObject);
417 __ Bind(&use_receiver);
418 __ Peek(x0, 0 * kPointerSize);
419 __ CompareRoot(x0, RootIndex::kTheHoleValue);
422 __ Bind(&leave_frame);
// Restore (smi-tagged) argc and drop arguments plus receiver.
424 __ SmiUntag(x1, MemOperand(fp, ConstructFrameConstants::kLengthOffset));
428 __ DropArguments(x1, TurboAssembler::kCountExcludesReceiver);
// Thin wrapper: the builtins construct stub is entirely implemented by the
// shared helper above.
431 void Builtins::Generate_JSBuiltinsConstructStub(MacroAssembler* masm) {
432 Generate_JSBuiltinsConstructStubHelper(masm);
// Called when `new` is applied to a non-constructable target: enters an
// internal frame and throws via the runtime (does not return).
// NOTE(review): the PushArgument of the target before the call is elided in
// this view.
435 void Builtins::Generate_ConstructedNonConstructable(MacroAssembler* masm) {
436 FrameScope scope(masm, StackFrame::INTERNAL);
438 __ CallRuntime(Runtime::kThrowConstructedNonConstructable);
// Resumes a suspended JSGeneratorObject (x1) with the resume value in x0:
// stores the input value (with write barrier), checks the debugger hooks,
// pushes the receiver and the saved parameters from the generator's
// parameters-and-registers array onto the stack, then jumps into the
// generator function's code. Out-of-line paths handle debugger stepping and
// stack overflow.
// NOTE(review): many original lines (loop headers, pops after runtime calls,
// the final jump) are elided in this view; code preserved verbatim.
442 void Builtins::Generate_ResumeGeneratorTrampoline(MacroAssembler* masm) {
448 __ AssertGeneratorObject(x1);
// Store the resume value into the generator and emit the write barrier.
451 __ Str(x0, FieldMemOperand(x1, JSGeneratorObject::kInputOrDebugPosOffset));
452 __ RecordWriteField(x1, JSGeneratorObject::kInputOrDebugPosOffset, x0, x3,
453 kLRHasNotBeenSaved, kDontSaveFPRegs);
// Load the generator's function (x4) and its context (cp).
456 __ Ldr(x4, FieldMemOperand(x1, JSGeneratorObject::kFunctionOffset));
457 __ Ldr(cp, FieldMemOperand(x4, JSFunction::kContextOffset));
460 Label prepare_step_in_if_stepping, prepare_step_in_suspended_generator;
461 Label stepping_prepared;
// Divert to the debugger path if the function-call hook is armed...
462 ExternalReference debug_hook =
463 ExternalReference::debug_hook_on_function_call_address(masm->isolate());
464 __ Mov(x10, debug_hook);
465 __ Ldrsb(x10, MemOperand(x10));
466 __ CompareAndBranch(x10, Operand(0), ne, &prepare_step_in_if_stepping);
// ...or if this is the generator the debugger flagged for stepping.
469 ExternalReference debug_suspended_generator =
470 ExternalReference::debug_suspended_generator_address(masm->isolate());
471 __ Mov(x10, debug_suspended_generator);
472 __ Ldr(x10, MemOperand(x10));
473 __ CompareAndBranch(x10, Operand(x1), eq,
474 &prepare_step_in_suspended_generator);
475 __ Bind(&stepping_prepared);
// Fail early if we are already beyond the real stack limit.
479 Label stack_overflow;
480 __ CompareRoot(sp, RootIndex::kRealStackLimit);
481 __ B(lo, &stack_overflow);
// Formal parameter count of the resumed function (w10).
484 __ Ldr(x10, FieldMemOperand(x4, JSFunction::kSharedFunctionInfoOffset));
485 __ Ldrh(w10, FieldMemOperand(
486 x10, SharedFunctionInfo::kFormalParameterCountOffset));
// Claim/pad stack (claim elided in this view), store padding, then poke the
// generator's receiver at the argument base.
495 __ Poke(padreg, Operand(x11, LSL, kPointerSizeLog2));
498 __ Ldr(x5, FieldMemOperand(x1, JSGeneratorObject::kReceiverOffset));
499 __ Poke(x5, Operand(x10, LSL, kPointerSizeLog2));
// Copy saved parameters out of the parameters-and-registers FixedArray.
// NOTE(review): the load of the array base (x5) and the loop header are
// elided in this view.
514 FieldMemOperand(x1, JSGeneratorObject::kParametersAndRegistersOffset));
522 __ Add(x11, x5, Operand(x12, LSL, kPointerSizeLog2));
523 __ Ldr(x11, FieldMemOperand(x11, FixedArray::kHeaderSize));
524 __ Poke(x11, Operand(x10, LSL, kPointerSizeLog2));
// Debug-mode sanity check: the resumed function must have bytecode
// (directly, or via an InterpreterData wrapper).
531 if (FLAG_debug_code) {
532 Label check_has_bytecode_array;
533 __ Ldr(x3, FieldMemOperand(x4, JSFunction::kSharedFunctionInfoOffset));
534 __ Ldr(x3, FieldMemOperand(x3, SharedFunctionInfo::kFunctionDataOffset));
535 __ CompareObjectType(x3, x0, x0, INTERPRETER_DATA_TYPE);
536 __ B(ne, &check_has_bytecode_array);
537 __ Ldr(x3, FieldMemOperand(x3, InterpreterData::kBytecodeArrayOffset));
538 __ Bind(&check_has_bytecode_array);
539 __ CompareObjectType(x3, x3, x3, BYTECODE_ARRAY_TYPE);
540 __ Assert(eq, AbortReason::kMissingBytecodeArray);
// Set up the JS calling convention: x0 = argc (formal parameter count),
// x2 = code start, and jump into the generator's code (jump elided here).
545 __ Ldr(x0, FieldMemOperand(x4, JSFunction::kSharedFunctionInfoOffset));
546 __ Ldrh(w0, FieldMemOperand(
547 x0, SharedFunctionInfo::kFormalParameterCountOffset));
553 static_assert(kJavaScriptCallCodeStartRegister == x2,
"ABI mismatch");
554 __ Ldr(x2, FieldMemOperand(x1, JSFunction::kCodeOffset));
555 __ Add(x2, x2, Code::kHeaderSize - kHeapObjectTag);
// Out-of-line: notify the debugger of the call, then retry.
559 __ Bind(&prepare_step_in_if_stepping);
561 FrameScope scope(masm, StackFrame::INTERNAL);
563 __ LoadRoot(x5, RootIndex::kTheHoleValue);
564 __ Push(x1, padreg, x4, x5);
565 __ CallRuntime(Runtime::kDebugOnFunctionCall);
567 __ Ldr(x4, FieldMemOperand(x1, JSGeneratorObject::kFunctionOffset));
569 __ B(&stepping_prepared);
// Out-of-line: prepare stepping into this suspended generator, then retry.
571 __ Bind(&prepare_step_in_suspended_generator);
573 FrameScope scope(masm, StackFrame::INTERNAL);
575 __ CallRuntime(Runtime::kDebugPrepareStepInSuspendedGenerator);
577 __ Ldr(x4, FieldMemOperand(x1, JSGeneratorObject::kFunctionOffset));
579 __ B(&stepping_prepared);
// Out-of-line: stack overflow — throw (does not return).
581 __ bind(&stack_overflow);
583 FrameScope scope(masm, StackFrame::INTERNAL);
584 __ CallRuntime(Runtime::kThrowStackOverflow);
// Shared body of the JS entry trampolines: called from C++ (via JSEntry)
// with new_target/function/receiver/argc/argv in registers. Sets up an
// internal frame, installs the current context from the isolate, copies the
// C-side argument array onto the JS stack, and calls either the Call or the
// Construct builtin depending on |is_construct|.
// NOTE(review): several original lines (the is_construct parameter line,
// argc/argv register declarations, the copy-loop labels, x20-x28 root
// initialization) are elided in this view; code preserved verbatim.
598 static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm,
600 Register new_target = x0;
601 Register
function = x1;
602 Register receiver = x2;
605 Register scratch = x10;
606 Register slots_to_claim = x11;
610 FrameScope scope(masm, StackFrame::INTERNAL);
// Install the current context from the isolate's context slot.
613 __ Mov(scratch, ExternalReference::Create(IsolateAddressId::kContextAddress,
615 __ Ldr(cp, MemOperand(scratch));
// Claim argc + 3 slots (receiver, function, padding), rounded to even for
// 16-byte sp alignment.
619 __ Add(slots_to_claim, argc, 3);
620 __ Bic(slots_to_claim, slots_to_claim, 1);
623 Label enough_stack_space, stack_overflow;
624 Generate_StackOverflowCheck(masm, slots_to_claim, &stack_overflow);
625 __ B(&enough_stack_space);
627 __ Bind(&stack_overflow);
628 __ CallRuntime(Runtime::kThrowStackOverflow);
631 __ Bind(&enough_stack_space);
632 __ Claim(slots_to_claim);
// Store padding in the topmost claimed slot.
635 __ SlotAddress(scratch, slots_to_claim);
636 __ Str(padreg, MemOperand(scratch, -kPointerSize));
// Store receiver and function just above the arguments.
639 __ SlotAddress(scratch, argc);
640 __ Stp(receiver,
function, MemOperand(scratch));
// Copy-loop body: each argv entry is a Handle (pointer to the object), so
// load the pointer, then dereference it, storing downwards from scratch.
// NOTE(review): the loop's label/branch lines are elided in this view.
655 __ Ldr(x11, MemOperand(argv, kPointerSize, PostIndex));
657 __ Ldr(x11, MemOperand(x11));
659 __ Str(x11, MemOperand(scratch, -kPointerSize, PreIndex));
// Swap new_target (x0) and argc into the JS calling convention positions.
666 __ Mov(scratch, argc);
667 __ Mov(argc, new_target);
668 __ Mov(new_target, scratch);
// Seed callee-saved registers with undefined so stale values are never
// mistaken for heap pointers.
675 __ LoadRoot(x19, RootIndex::kUndefinedValue);
// Dispatch to Construct or Call.
688 Handle<Code> builtin = is_construct
689 ? BUILTIN_CODE(masm->isolate(), Construct)
690 : masm->isolate()->builtins()->Call();
691 __ Call(builtin, RelocInfo::CODE_TARGET);
// Plain-call entry trampoline (is_construct = false).
701 void Builtins::Generate_JSEntryTrampoline(MacroAssembler* masm) {
702 Generate_JSEntryTrampolineHelper(masm,
false);
// Construct entry trampoline (is_construct = true).
705 void Builtins::Generate_JSConstructEntryTrampoline(MacroAssembler* masm) {
706 Generate_JSEntryTrampolineHelper(masm,
true);
// Installs |optimized_code| as the closure's code, with the GC write barrier
// (optimized_code is copied to scratch1 because RecordWriteField clobbers
// its value register). NOTE(review): the closing arguments of the
// RecordWriteField call are elided in this view.
709 static void ReplaceClosureCodeWithOptimizedCode(
710 MacroAssembler* masm, Register optimized_code, Register closure,
711 Register scratch1, Register scratch2, Register scratch3) {
713 __ Str(optimized_code, FieldMemOperand(closure, JSFunction::kCodeOffset));
714 __ Mov(scratch1, optimized_code);
715 __ RecordWriteField(closure, JSFunction::kCodeOffset, scratch1, scratch2,
716 kLRHasNotBeenSaved, kDontSaveFPRegs, OMIT_REMEMBERED_SET,
// Tears down the interpreter frame and drops the caller's arguments. The
// argument byte size is read from the frame's BytecodeArray parameter-size
// field before leaving the frame (load of the array itself is on a line
// elided in this view).
720 static void LeaveInterpreterFrame(MacroAssembler* masm, Register scratch) {
721 Register args_size = scratch;
725 MemOperand(fp, InterpreterFrameConstants::kBytecodeArrayFromFp));
726 __ Ldr(args_size.W(),
727 FieldMemOperand(args_size, BytecodeArray::kParameterSizeOffset));
730 __ LeaveFrame(StackFrame::INTERPRETED);
// Parameter size must be pointer-aligned.
733 if (__ emit_debug_code()) {
734 __ Tst(args_size, kPointerSize - 1);
735 __ Check(eq, AbortReason::kUnexpectedValue);
// Convert bytes to slots and drop them.
737 __ Lsr(args_size, args_size, kPointerSizeLog2);
738 __ DropArguments(args_size);
// If |smi_entry| equals the given optimization |marker|, tail-calls the
// corresponding runtime function; otherwise falls through (the no_match
// label declaration and smi_entry parameter line are elided in this view).
742 static void TailCallRuntimeIfMarkerEquals(MacroAssembler* masm,
744 OptimizationMarker marker,
745 Runtime::FunctionId function_id) {
747 __ CompareAndBranch(smi_entry, Operand(Smi::FromEnum(marker)), ne, &no_match);
748 GenerateTailCallToReturnedCode(masm, function_id);
// Inspects the feedback vector's optimized-code slot. If it holds an
// optimization marker (smi), dispatches to the matching runtime action
// (log first execution / compile optimized / compile concurrent); if it
// holds a weak reference to optimized Code that is not marked for
// deoptimization, installs that code on the closure and tail-calls it;
// otherwise falls through to the interpreter path.
// NOTE(review): several lines (scratch3 parameter, the smi load, the jump
// to optimized code, fallthrough branches) are elided in this view.
752 static void MaybeTailCallOptimizedCodeSlot(MacroAssembler* masm,
753 Register feedback_vector,
754 Register scratch1, Register scratch2,
763 !AreAliased(feedback_vector, x0, x1, x3, scratch1, scratch2, scratch3));
765 Label optimized_code_slot_is_weak_ref, fallthrough;
767 Register closure = x1;
768 Register optimized_code_entry = scratch1;
// Load the optimized-code slot (load instruction partially elided).
771 optimized_code_entry,
772 FieldMemOperand(feedback_vector, FeedbackVector::kOptimizedCodeOffset));
// A non-smi means a weak reference to actual Code.
777 __ JumpIfNotSmi(optimized_code_entry, &optimized_code_slot_is_weak_ref);
// kNone: nothing to do (branch target elided in this view).
783 __ CompareAndBranch(optimized_code_entry,
784 Operand(Smi::FromEnum(OptimizationMarker::kNone)), eq,
// Marker dispatch: each helper tail-calls its runtime function on match.
790 TailCallRuntimeIfMarkerEquals(masm, optimized_code_entry,
791 OptimizationMarker::kLogFirstExecution,
792 Runtime::kFunctionFirstExecution);
793 TailCallRuntimeIfMarkerEquals(masm, optimized_code_entry,
794 OptimizationMarker::kCompileOptimized,
795 Runtime::kCompileOptimized_NotConcurrent);
796 TailCallRuntimeIfMarkerEquals(
797 masm, optimized_code_entry,
798 OptimizationMarker::kCompileOptimizedConcurrent,
799 Runtime::kCompileOptimized_Concurrent);
// Only kInOptimizationQueue can remain at this point.
804 if (FLAG_debug_code) {
806 optimized_code_entry,
807 Operand(Smi::FromEnum(OptimizationMarker::kInOptimizationQueue)));
808 __ Assert(eq, AbortReason::kExpectedOptimizationSentinel);
// Weak-reference path: a cleared weak ref falls through to the interpreter.
816 __ bind(&optimized_code_slot_is_weak_ref);
818 __ LoadWeakValue(optimized_code_entry, optimized_code_entry, &fallthrough);
// Evict and recompile if the code was marked for deoptimization.
822 Label found_deoptimized_code;
823 __ Ldr(scratch2, FieldMemOperand(optimized_code_entry,
824 Code::kCodeDataContainerOffset));
827 FieldMemOperand(scratch2, CodeDataContainer::kKindSpecificFlagsOffset));
828 __ TestAndBranchIfAnySet(scratch2, 1 << Code::kMarkedForDeoptimizationBit,
829 &found_deoptimized_code);
// Healthy optimized code: install it on the closure and tail-call it
// (the jump itself is on a line elided in this view).
835 ReplaceClosureCodeWithOptimizedCode(masm, optimized_code_entry, closure,
836 scratch2, scratch3, feedback_vector);
837 static_assert(kJavaScriptCallCodeStartRegister == x2,
"ABI mismatch");
838 __ Add(x2, optimized_code_entry,
839 Operand(Code::kHeaderSize - kHeapObjectTag));
844 __ bind(&found_deoptimized_code);
845 GenerateTailCallToReturnedCode(masm, Runtime::kEvictOptimizedCodeSlot);
850 __ bind(&fallthrough);
// Advances |bytecode_offset| past the current bytecode, accounting for the
// Wide/ExtraWide prefix bytecodes by switching to the corresponding section
// of the bytecode size table. Return bytecodes branch to an if_return label
// instead (the label parameter and the JUMP_IF_EQUAL branch line are elided
// in this view).
856 static void AdvanceBytecodeOffsetOrReturn(MacroAssembler* masm,
857 Register bytecode_array,
858 Register bytecode_offset,
859 Register bytecode, Register scratch1,
861 Register bytecode_size_table = scratch1;
862 DCHECK(!AreAliased(bytecode_array, bytecode_offset, bytecode_size_table,
865 __ Mov(bytecode_size_table, ExternalReference::bytecode_size_table_address());
// The four prefix bytecodes occupy values 0..3; odd values are ExtraWide.
868 Label process_bytecode, extra_wide;
869 STATIC_ASSERT(0 == static_cast<int>(interpreter::Bytecode::kWide));
870 STATIC_ASSERT(1 == static_cast<int>(interpreter::Bytecode::kExtraWide));
871 STATIC_ASSERT(2 == static_cast<int>(interpreter::Bytecode::kDebugBreakWide));
873 static_cast<int>(interpreter::Bytecode::kDebugBreakExtraWide));
874 __ Cmp(bytecode, Operand(0x3));
875 __ B(hi, &process_bytecode);
876 __ Tst(bytecode, Operand(0x1));
877 __ B(ne, &extra_wide);
// Wide prefix: load the real bytecode and use the wide size table.
880 __ Add(bytecode_offset, bytecode_offset, Operand(1));
881 __ Ldrb(bytecode, MemOperand(bytecode_array, bytecode_offset));
882 __ Add(bytecode_size_table, bytecode_size_table,
883 Operand(kIntSize * interpreter::Bytecodes::kBytecodeCount));
884 __ B(&process_bytecode);
// ExtraWide prefix: same, but the extra-wide size table (two sections in).
886 __ Bind(&extra_wide);
888 __ Add(bytecode_offset, bytecode_offset, Operand(1));
889 __ Ldrb(bytecode, MemOperand(bytecode_array, bytecode_offset));
890 __ Add(bytecode_size_table, bytecode_size_table,
891 Operand(2 * kIntSize * interpreter::Bytecodes::kBytecodeCount));
893 __ Bind(&process_bytecode);
// Branch out for every return bytecode (macro body damaged by extraction;
// the B(eq, if_return) line is elided).
896 #define JUMP_IF_EQUAL(NAME) \ 897 __ Cmp(x1, Operand(static_cast<int>(interpreter::Bytecode::k##NAME))); \ 899 RETURN_BYTECODE_LIST(JUMP_IF_EQUAL)
// Otherwise add this bytecode's size from the (possibly shifted) table.
903 __ Ldr(scratch1.W(), MemOperand(bytecode_size_table, bytecode, LSL, 2));
904 __ Add(bytecode_offset, bytecode_offset, scratch1);
// Entry point for interpreted functions. Checks the feedback vector for
// optimized code / optimization markers, bumps the invocation count, builds
// an interpreter frame, allocates and zero-fills (with undefined) the
// register file, then enters the dispatch loop: load bytecode, index the
// dispatch table, call the handler, advance, repeat — until a return
// bytecode leaves the frame.
// NOTE(review): many original lines are elided in this view (loop headers,
// do_dispatch/do_return label declarations, several branch targets); code
// preserved verbatim.
921 void Builtins::Generate_InterpreterEntryTrampoline(MacroAssembler* masm) {
922 Register closure = x1;
923 Register feedback_vector = x2;
// Load the feedback vector from the closure's feedback cell.
926 __ Ldr(feedback_vector,
927 FieldMemOperand(closure, JSFunction::kFeedbackCellOffset));
928 __ Ldr(feedback_vector, FieldMemOperand(feedback_vector, Cell::kValueOffset));
930 Label push_stack_frame;
// No feedback vector yet: skip the optimization checks.
933 __ CompareRoot(feedback_vector, RootIndex::kUndefinedValue);
934 __ B(eq, &push_stack_frame);
// May tail-call optimized code and never return here.
938 MaybeTailCallOptimizedCodeSlot(masm, feedback_vector, x7, x4, x5);
// Bump the invocation count (used by the optimization heuristics).
942 __ Ldr(w10, FieldMemOperand(feedback_vector,
943 FeedbackVector::kInvocationCountOffset));
944 __ Add(w10, w10, Operand(1));
945 __ Str(w10, FieldMemOperand(feedback_vector,
946 FeedbackVector::kInvocationCountOffset));
// Manually build the interpreter frame: lr, fp, context, closure.
951 __ Bind(&push_stack_frame);
952 FrameScope frame_scope(masm, StackFrame::MANUAL);
953 __ Push(lr, fp, cp, closure);
954 __ Add(fp, sp, StandardFrameConstants::kFixedFrameSizeFromFp);
// Fetch the BytecodeArray, unwrapping an InterpreterData if present.
958 Label has_bytecode_array;
959 __ Ldr(x0, FieldMemOperand(closure, JSFunction::kSharedFunctionInfoOffset));
960 __ Ldr(kInterpreterBytecodeArrayRegister,
961 FieldMemOperand(x0, SharedFunctionInfo::kFunctionDataOffset));
962 __ CompareObjectType(kInterpreterBytecodeArrayRegister, x11, x11,
963 INTERPRETER_DATA_TYPE);
964 __ B(ne, &has_bytecode_array);
965 __ Ldr(kInterpreterBytecodeArrayRegister,
966 FieldMemOperand(kInterpreterBytecodeArrayRegister,
967 InterpreterData::kBytecodeArrayOffset));
968 __ Bind(&has_bytecode_array);
971 if (FLAG_debug_code) {
973 kInterpreterBytecodeArrayRegister,
974 AbortReason::kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry);
975 __ CompareObjectType(kInterpreterBytecodeArrayRegister, x0, x0,
976 BYTECODE_ARRAY_TYPE);
978 eq, AbortReason::kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry);
// Reset bytecode age so this function is not flushed as old.
982 __ Mov(x10, Operand(BytecodeArray::kNoAgeBytecodeAge));
983 __ Strb(x10, FieldMemOperand(kInterpreterBytecodeArrayRegister,
984 BytecodeArray::kBytecodeAgeOffset));
// Start at the first bytecode (header-relative, untagged offset).
987 __ Mov(kInterpreterBytecodeOffsetRegister,
988 Operand(BytecodeArray::kHeaderSize - kHeapObjectTag));
// Frame slots: bytecode array and the smi-tagged bytecode offset.
991 __ SmiTag(x0, kInterpreterBytecodeOffsetRegister);
992 __ Push(kInterpreterBytecodeArrayRegister, x0);
// Allocate the register file; its byte size comes from the bytecode array.
997 __ Ldr(w11, FieldMemOperand(kInterpreterBytecodeArrayRegister,
998 BytecodeArray::kFrameSizeOffset));
// Stack check against the real limit (ok-branch elided in this view).
1002 __ Sub(x10, sp, Operand(x11));
1003 __ CompareRoot(x10, RootIndex::kRealStackLimit);
1005 __ CallRuntime(Runtime::kThrowStackOverflow);
// Fill the register file with undefined; round slot count up to even for
// sp alignment.
1012 __ LoadRoot(x10, RootIndex::kUndefinedValue);
1013 __ Lsr(x11, x11, kPointerSizeLog2);
1016 __ Add(x11, x11, 1);
1017 __ Bic(x11, x11, 1);
1018 __ PushMultipleTimes(x10, x11);
1019 __ Bind(&loop_header);
// If the bytecode array designates an incoming new-target/generator
// register, store x3 (new target) into that frame slot.
1024 Label no_incoming_new_target_or_generator_register;
1027 kInterpreterBytecodeArrayRegister,
1028 BytecodeArray::kIncomingNewTargetOrGeneratorRegisterOffset));
1029 __ Cbz(x10, &no_incoming_new_target_or_generator_register);
1030 __ Str(x3, MemOperand(fp, x10, LSL, kPointerSizeLog2));
1031 __ Bind(&no_incoming_new_target_or_generator_register);
// The accumulator starts out undefined.
1034 __ LoadRoot(kInterpreterAccumulatorRegister, RootIndex::kUndefinedValue);
// Dispatch loop: load the current bytecode, index into the dispatch table,
// and call the handler. On Windows x18 is reserved (platform register), so
// a different scratch register is used for the bytecode.
1039 __ bind(&do_dispatch);
1041 kInterpreterDispatchTableRegister,
1042 ExternalReference::interpreter_dispatch_table_address(masm->isolate()));
1043 #if defined(V8_OS_WIN) 1044 __ Ldrb(x23, MemOperand(kInterpreterBytecodeArrayRegister,
1045 kInterpreterBytecodeOffsetRegister));
1046 __ Mov(x1, Operand(x23, LSL, kPointerSizeLog2));
1048 __ Ldrb(x18, MemOperand(kInterpreterBytecodeArrayRegister,
1049 kInterpreterBytecodeOffsetRegister));
1050 __ Mov(x1, Operand(x18, LSL, kPointerSizeLog2));
1052 __ Ldr(kJavaScriptCallCodeStartRegister,
1053 MemOperand(kInterpreterDispatchTableRegister, x1));
1054 __ Call(kJavaScriptCallCodeStartRegister);
// Record where handlers return to, for InterpreterEnterBytecode.
1055 masm->isolate()->heap()->SetInterpreterEntryReturnPCOffset(masm->pc_offset());
// Reload array/offset from the frame (handlers may have clobbered them).
1061 __ Ldr(kInterpreterBytecodeArrayRegister,
1062 MemOperand(fp, InterpreterFrameConstants::kBytecodeArrayFromFp));
1063 __ Ldr(kInterpreterBytecodeOffsetRegister,
1064 MemOperand(fp, InterpreterFrameConstants::kBytecodeOffsetFromFp));
1065 __ SmiUntag(kInterpreterBytecodeOffsetRegister);
// Advance to the next bytecode, or fall into the return path.
1069 __ Ldrb(x1, MemOperand(kInterpreterBytecodeArrayRegister,
1070 kInterpreterBytecodeOffsetRegister));
1071 AdvanceBytecodeOffsetOrReturn(masm, kInterpreterBytecodeArrayRegister,
1072 kInterpreterBytecodeOffsetRegister, x1, x2,
1076 __ bind(&do_return);
1078 LeaveInterpreterFrame(masm, x2);
// Pushes |num_args| interpreter-register arguments onto the stack for a
// call/construct. Handles the final-spread mode (last argument is popped
// into |spread_arg_out| instead of pushed), optionally stores an undefined
// receiver, pads to even slot count for sp alignment, and bulk-copies the
// arguments with CopyDoubleWords.
// NOTE(review): the num_args parameter line and a few branch/label lines
// are elided in this view; code preserved verbatim.
1082 static void Generate_InterpreterPushArgs(MacroAssembler* masm,
1084 Register first_arg_index,
1085 Register spread_arg_out,
1086 ConvertReceiverMode receiver_mode,
1087 InterpreterPushArgsMode mode) {
1088 Register last_arg_addr = x10;
1089 Register stack_addr = x11;
1090 Register slots_to_claim = x12;
1091 Register slots_to_copy = x13;
1093 DCHECK(!AreAliased(num_args, first_arg_index, last_arg_addr, stack_addr,
1094 slots_to_claim, slots_to_copy));
1096 DCHECK(!AreAliased(spread_arg_out, last_arg_addr, stack_addr, slots_to_claim,
// One extra slot for the receiver.
1100 __ Add(slots_to_claim, num_args, 1);
1102 if (mode == InterpreterPushArgsMode::kWithFinalSpread) {
// The spread argument is consumed separately, not pushed.
1104 __ Sub(slots_to_claim, slots_to_claim, 1);
1105 __ Sub(num_args, num_args, 1);
1109 Label stack_overflow, done;
1110 Generate_StackOverflowCheck(masm, slots_to_claim, &stack_overflow);
1112 __ Bind(&stack_overflow);
1113 __ TailCallRuntime(Runtime::kThrowStackOverflow);
// Round the claim up to an even number of slots and pad the top slot.
1118 __ Add(slots_to_claim, slots_to_claim, 1);
1119 __ Bic(slots_to_claim, slots_to_claim, 1);
1120 __ Claim(slots_to_claim);
1124 UseScratchRegisterScope temps(masm);
1125 Register scratch = temps.AcquireX();
1126 __ Sub(scratch, slots_to_claim, 1);
1127 __ Poke(padreg, Operand(scratch, LSL, kPointerSizeLog2));
// Store an undefined receiver when the bytecode guarantees null/undefined;
// otherwise the receiver is copied along with the arguments (else-branch
// partially elided in this view).
1130 if (receiver_mode == ConvertReceiverMode::kNullOrUndefined) {
1132 Register receiver = x14;
1133 __ LoadRoot(receiver, RootIndex::kUndefinedValue);
1134 __ SlotAddress(stack_addr, num_args);
1135 __ Str(receiver, MemOperand(stack_addr));
1136 __ Mov(slots_to_copy, num_args);
1140 __ Add(slots_to_copy, num_args, 1);
// Interpreter registers grow downwards: compute the address of the last
// argument from the first-argument index.
1143 __ Sub(last_arg_addr, first_arg_index,
1144 Operand(slots_to_copy, LSL, kPointerSizeLog2));
1145 __ Add(last_arg_addr, last_arg_addr, kPointerSize);
1148 if (mode == InterpreterPushArgsMode::kWithFinalSpread) {
// Hand the spread argument back to the caller.
1149 __ Ldr(spread_arg_out, MemOperand(last_arg_addr, -kPointerSize));
// Bulk-copy all arguments (and possibly the receiver) onto the stack.
1153 __ SlotAddress(stack_addr, 0);
1154 __ CopyDoubleWords(stack_addr, last_arg_addr, slots_to_copy);
// Interpreter call bytecode fast path: pushes the arguments via the shared
// helper, then tail-calls either CallWithSpread or the generic Call builtin
// (the `} else {` between the two jumps is elided in this view).
1158 void Builtins::Generate_InterpreterPushArgsThenCallImpl(
1159 MacroAssembler* masm, ConvertReceiverMode receiver_mode,
1160 InterpreterPushArgsMode mode) {
// Array-function mode is handled by the construct variant only.
1161 DCHECK(mode != InterpreterPushArgsMode::kArrayFunction);
1173 Register num_args = x0;
1174 Register first_arg_index = x2;
// In spread mode the spread is returned in x2 for CallWithSpread.
1175 Register spread_arg_out =
1176 (mode == InterpreterPushArgsMode::kWithFinalSpread) ? x2 : no_reg;
1177 Generate_InterpreterPushArgs(masm, num_args, first_arg_index, spread_arg_out,
1178 receiver_mode, mode);
1181 if (mode == InterpreterPushArgsMode::kWithFinalSpread) {
1182 __ Jump(BUILTIN_CODE(masm->isolate(), CallWithSpread),
1183 RelocInfo::CODE_TARGET);
1185 __ Jump(masm->isolate()->builtins()->Call(ConvertReceiverMode::kAny),
1186 RelocInfo::CODE_TARGET);
// Interpreter construct bytecode fast path: pushes the arguments (receiver
// is always undefined for construct), then tail-calls ArrayConstructorImpl,
// ConstructWithSpread, or the generic Construct builtin depending on |mode|
// (the `} else` before the kOther branch is elided in this view).
1191 void Builtins::Generate_InterpreterPushArgsThenConstructImpl(
1192 MacroAssembler* masm, InterpreterPushArgsMode mode) {
// x2 carries the AllocationSite (or undefined) for the array constructor.
1200 __ AssertUndefinedOrAllocationSite(x2);
1205 Register num_args = x0;
1206 Register first_arg_index = x4;
1207 Register spread_arg_out =
1208 (mode == InterpreterPushArgsMode::kWithFinalSpread) ? x2 : no_reg;
1209 Generate_InterpreterPushArgs(masm, num_args, first_arg_index, spread_arg_out,
1210 ConvertReceiverMode::kNullOrUndefined, mode);
1212 if (mode == InterpreterPushArgsMode::kArrayFunction) {
1213 __ AssertFunction(x1);
1217 Handle<Code> code = BUILTIN_CODE(masm->isolate(), ArrayConstructorImpl);
1218 __ Jump(code, RelocInfo::CODE_TARGET);
1219 }
else if (mode == InterpreterPushArgsMode::kWithFinalSpread) {
1221 __ Jump(BUILTIN_CODE(masm->isolate(), ConstructWithSpread),
1222 RelocInfo::CODE_TARGET);
1224 DCHECK_EQ(InterpreterPushArgsMode::kOther, mode);
1226 __ Jump(BUILTIN_CODE(masm->isolate(), Construct), RelocInfo::CODE_TARGET);
// (Re-)enters the interpreter dispatch loop for the current frame: fakes lr
// to point just after the Call in the entry trampoline (custom trampoline
// from InterpreterData, or the builtin one), reloads dispatch table,
// bytecode array and offset from the frame, then jumps directly to the
// handler for the current bytecode.
// NOTE(review): several lines (the trampoline load, #else/#endif of the
// Windows block, debug Assert lines) are elided in this view.
1230 static void Generate_InterpreterEnterBytecode(MacroAssembler* masm) {
1233 Label builtin_trampoline, trampoline_loaded;
1234 Smi interpreter_entry_return_pc_offset(
1235 masm->isolate()->heap()->interpreter_entry_return_pc_offset());
// Must have been recorded by Generate_InterpreterEntryTrampoline already.
1236 DCHECK_NE(interpreter_entry_return_pc_offset, Smi::kZero);
// If the function has an InterpreterData, use its custom trampoline...
1242 __ Ldr(x1, MemOperand(fp, StandardFrameConstants::kFunctionOffset));
1243 __ Ldr(x1, FieldMemOperand(x1, JSFunction::kSharedFunctionInfoOffset));
1244 __ Ldr(x1, FieldMemOperand(x1, SharedFunctionInfo::kFunctionDataOffset));
1245 __ CompareObjectType(x1, kInterpreterDispatchTableRegister,
1246 kInterpreterDispatchTableRegister,
1247 INTERPRETER_DATA_TYPE);
1248 __ B(ne, &builtin_trampoline);
1251 FieldMemOperand(x1, InterpreterData::kInterpreterTrampolineOffset));
1252 __ Add(x1, x1, Operand(Code::kHeaderSize - kHeapObjectTag));
1253 __ B(&trampoline_loaded);
// ...otherwise the shared builtin entry trampoline.
1255 __ Bind(&builtin_trampoline);
1256 __ Mov(x1, ExternalReference::
1257 address_of_interpreter_entry_trampoline_instruction_start(
1259 __ Ldr(x1, MemOperand(x1));
// Set lr to the recorded return point inside the trampoline.
1261 __ Bind(&trampoline_loaded);
1262 __ Add(lr, x1, Operand(interpreter_entry_return_pc_offset->value()));
// Reinitialize the dispatch table register.
1266 kInterpreterDispatchTableRegister,
1267 ExternalReference::interpreter_dispatch_table_address(masm->isolate()));
// Reload the bytecode array and assert its type in debug builds.
1270 __ Ldr(kInterpreterBytecodeArrayRegister,
1271 MemOperand(fp, InterpreterFrameConstants::kBytecodeArrayFromFp));
1273 if (FLAG_debug_code) {
1276 kInterpreterBytecodeArrayRegister,
1277 AbortReason::kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry);
1278 __ CompareObjectType(kInterpreterBytecodeArrayRegister, x1, x1,
1279 BYTECODE_ARRAY_TYPE);
1281 eq, AbortReason::kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry);
// Reload and untag the bytecode offset.
1285 __ Ldr(kInterpreterBytecodeOffsetRegister,
1286 MemOperand(fp, InterpreterFrameConstants::kBytecodeOffsetFromFp));
1287 __ SmiUntag(kInterpreterBytecodeOffsetRegister);
// Dispatch to the current bytecode's handler (x18 is the reserved platform
// register on Windows, hence the x23 alternative there).
1290 #if defined(V8_OS_WIN) 1291 __ Ldrb(x23, MemOperand(kInterpreterBytecodeArrayRegister,
1292 kInterpreterBytecodeOffsetRegister));
1293 __ Mov(x1, Operand(x23, LSL, kPointerSizeLog2));
1295 __ Ldrb(x18, MemOperand(kInterpreterBytecodeArrayRegister,
1296 kInterpreterBytecodeOffsetRegister));
1297 __ Mov(x1, Operand(x18, LSL, kPointerSizeLog2));
1299 __ Ldr(kJavaScriptCallCodeStartRegister,
1300 MemOperand(kInterpreterDispatchTableRegister, x1));
1301 __ Jump(kJavaScriptCallCodeStartRegister);
// Advances the frame's saved bytecode offset past the current bytecode and
// re-enters the dispatch loop at the next bytecode. Hitting a return
// bytecode here is invalid and aborts (the if_return label declaration is
// elided in this view).
1304 void Builtins::Generate_InterpreterEnterBytecodeAdvance(MacroAssembler* masm) {
// Load the current array and (smi-tagged) offset from the frame.
1306 __ ldr(kInterpreterBytecodeArrayRegister,
1307 MemOperand(fp, InterpreterFrameConstants::kBytecodeArrayFromFp));
1308 __ ldr(kInterpreterBytecodeOffsetRegister,
1309 MemOperand(fp, InterpreterFrameConstants::kBytecodeOffsetFromFp));
1310 __ SmiUntag(kInterpreterBytecodeOffsetRegister);
// Load the current bytecode and advance past it.
1313 __ Ldrb(x1, MemOperand(kInterpreterBytecodeArrayRegister,
1314 kInterpreterBytecodeOffsetRegister));
1318 AdvanceBytecodeOffsetOrReturn(masm, kInterpreterBytecodeArrayRegister,
1319 kInterpreterBytecodeOffsetRegister, x1, x2,
// Write the advanced offset back (smi-tagged) and re-enter dispatch.
1323 __ SmiTag(x2, kInterpreterBytecodeOffsetRegister);
1324 __ Str(x2, MemOperand(fp, InterpreterFrameConstants::kBytecodeOffsetFromFp));
1326 Generate_InterpreterEnterBytecode(masm);
// A return bytecode should never reach the advance path.
1329 __ bind(&if_return);
1330 __ Abort(AbortReason::kInvalidBytecodeAdvance);
// Re-enters dispatch at the current (unadvanced) bytecode offset.
1333 void Builtins::Generate_InterpreterEnterBytecodeDispatch(MacroAssembler* masm) {
1334 Generate_InterpreterEnterBytecode(masm);
// Attempts to instantiate an asm.js module via Runtime::kInstantiateAsmJs
// with up to three caller arguments (missing ones padded with undefined).
// On success, leaves the frame and returns the instantiated result; on
// failure (smi returned), falls back to compiling the function normally.
// NOTE(review): numerous lines are elided in this view (argc register
// declaration, three_args/args_done/failed labels, Ret, and the final
// jump on the fallback path); code preserved verbatim.
1337 void Builtins::Generate_InstantiateAsmJs(MacroAssembler* masm) {
1344 Register new_target = x1;
1345 Register target = x3;
1349 FrameScope scope(masm, StackFrame::INTERNAL);
// Preserve the JS-call registers for the failure path.
1354 __ Push(argc, new_target, target, padreg);
1359 Register undef = x10;
1360 Register scratch1 = x12;
1361 Register scratch2 = x13;
1362 Register scratch3 = x14;
1363 __ LoadRoot(undef, RootIndex::kUndefinedValue);
1365 Label at_least_one_arg;
// Relies on the zero smi being all-zero bits so Cbnz on the smi argc works.
1367 DCHECK_NULL(Smi::kZero);
1368 __ Cbnz(argc, &at_least_one_arg);
// Zero args: push new_target plus three undefined padding slots.
1371 __ Push(new_target, undef, undef, undef);
1374 __ Bind(&at_least_one_arg);
// Load the first two caller arguments (reversed by Ldp's slot order).
1376 __ Ldp(scratch2, scratch1,
1377 MemOperand(fp, StandardFrameConstants::kCallerSPOffset));
1381 __ Cmp(argc, Smi::FromInt(2));
1382 __ B(gt, &three_args);
// One or two args: conditionally shuffle/undef-pad based on the compare.
1387 __ CmovX(scratch1, scratch2, lt);
1388 __ CmovX(scratch2, undef, lt);
1389 __ Push(new_target, scratch1, scratch2, undef);
// Three (or more) args: also load the third argument.
1393 __ Bind(&three_args);
1394 __ Ldr(scratch3, MemOperand(fp, StandardFrameConstants::kCallerSPOffset +
1396 __ Push(new_target, scratch3, scratch1, scratch2);
1398 __ Bind(&args_done);
// Attempt instantiation; a smi result signals failure.
1401 __ CallRuntime(Runtime::kInstantiateAsmJs, 4);
1404 __ JumpIfSmi(x0, &failed);
// Success: recover argc from the saved slots, leave the frame, drop args.
1407 __ SmiUntag(x4, MemOperand(sp, 3 * kPointerSize));
1409 scope.GenerateLeaveFrame();
1412 __ DropArguments(x4, TurboAssembler::kCountExcludesReceiver);
// Failure path: restore the saved JS-call registers and run the function's
// regular code instead.
1417 __ Pop(padreg, target, new_target, argc);
1422 __ Ldr(x4, FieldMemOperand(new_target, JSFunction::kCodeOffset));
1423 __ Add(x4, x4, Code::kHeaderSize - kHeapObjectTag);
// Deoptimizer continuation: rebuilds register state from a builtin
// continuation frame (all allocatable general registers were spilled there,
// padded to even count), then tail-calls the recorded builtin. For JS
// builtins the argument count register is smi-untagged first.
// NOTE(review): several lines (with_result parameter, result plumbing,
// frame teardown, the final Br) are elided in this view; code preserved
// verbatim.
1428 void Generate_ContinueToBuiltinHelper(MacroAssembler* masm,
1429 bool java_script_builtin,
1431 const RegisterConfiguration* config(RegisterConfiguration::Default());
1432 int allocatable_register_count = config->num_allocatable_general_registers();
// Frame size = fixed part + spilled registers + alignment padding slots.
1433 int frame_size = BuiltinContinuationFrameConstants::kFixedFrameSizeFromFp +
1434 (allocatable_register_count +
1435 BuiltinContinuationFrameConstants::PaddingSlotCount(
1436 allocatable_register_count)) *
1440 __ Add(fp, sp, frame_size);
1446 MemOperand(fp, BuiltinContinuationFrameConstants::kCallerSPOffset));
// Restore the spilled allocatable registers, pairwise from the bottom of
// the spill area upwards.
1450 int offset = -BuiltinContinuationFrameConstants::kFixedFrameSizeFromFp -
1451 allocatable_register_count * kPointerSize;
1452 for (
int i = allocatable_register_count - 1;
i > 0;
i -= 2) {
1453 int code1 = config->GetAllocatableGeneralCode(
i);
1454 int code2 = config->GetAllocatableGeneralCode(
i - 1);
1455 Register reg1 = Register::from_code(code1);
1456 Register reg2 = Register::from_code(code2);
1457 __ Ldp(reg1, reg2, MemOperand(fp, offset));
1458 offset += 2 * kPointerSize;
// Odd register count: one remaining single-register load.
1462 if (allocatable_register_count % 2 != 0) {
1463 int code = config->GetAllocatableGeneralCode(0);
1464 __ Ldr(Register::from_code(code), MemOperand(fp, offset));
// JS builtins expect an untagged argument count.
1467 if (java_script_builtin) __ SmiUntag(kJavaScriptCallArgCountRegister);
// Load the continuation builtin from the frame and jump to its entry
// (the load and the Br are on lines elided in this view).
1470 UseScratchRegisterScope temps(masm);
1471 Register builtin = temps.AcquireX();
1473 MemOperand(fp, BuiltinContinuationFrameConstants::kBuiltinOffset));
1480 __ Add(builtin, builtin, Code::kHeaderSize - kHeapObjectTag);
// Continuation into a code-stub (non-JavaScript) builtin that does not
// expect a result value: java_script_builtin=false, with_result=false.
1485 void Builtins::Generate_ContinueToCodeStubBuiltin(MacroAssembler* masm) {
1486 Generate_ContinueToBuiltinHelper(masm,
false,
false);
// Continuation into a code-stub builtin that expects the result of the
// preceding call: java_script_builtin=false, with_result=true.
1489 void Builtins::Generate_ContinueToCodeStubBuiltinWithResult(
1490 MacroAssembler* masm) {
1491 Generate_ContinueToBuiltinHelper(masm,
false,
true);
// Continuation into a JavaScript builtin without a pending result:
// java_script_builtin=true, with_result=false.
1494 void Builtins::Generate_ContinueToJavaScriptBuiltin(MacroAssembler* masm) {
1495 Generate_ContinueToBuiltinHelper(masm,
true,
false);
// Continuation into a JavaScript builtin that expects the result of the
// preceding call: java_script_builtin=true, with_result=true.
1498 void Builtins::Generate_ContinueToJavaScriptBuiltinWithResult(
1499 MacroAssembler* masm) {
1500 Generate_ContinueToBuiltinHelper(masm,
true,
true);
// Calls the runtime to notify it of a completed deoptimization. The runtime
// call is wrapped in an internal frame; the interpreter accumulator is
// expected to live in x0 (checked by the DCHECK).
1503 void Builtins::Generate_NotifyDeoptimized(MacroAssembler* masm) {
1505 FrameScope scope(masm, StackFrame::INTERNAL);
1506 __ CallRuntime(Runtime::kNotifyDeoptimized);
1510 DCHECK_EQ(kInterpreterAccumulatorRegister.code(), x0.code());
// On-stack replacement entry: asks the runtime to compile optimized code for
// the function in the caller's frame, then jumps into that code at the OSR
// entry offset recorded in its deoptimization data.
1515 void Builtins::Generate_InterpreterOnStackReplacement(MacroAssembler* masm) {
// Fetch the JSFunction out of the caller's (interpreted) frame.
1517 __ Ldr(x0, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
1518 __ Ldr(x0, MemOperand(x0, JavaScriptFrameConstants::kFunctionOffset));
1521 FrameScope scope(masm, StackFrame::INTERNAL);
1523 __ PushArgument(x0);
1524 __ CallRuntime(Runtime::kCompileForOnStackReplacement);
// A Smi zero result means no OSR code is available; skip the OSR jump.
1529 __ CompareAndBranch(x0, Smi::zero(), ne, &skip);
1536 __ LeaveFrame(StackFrame::STUB);
// Load the deoptimization data of the returned Code object (x0 is still
// tagged here, hence the -kHeapObjectTag adjustment).
1540 __ Ldr(x1, MemOperand(x0, Code::kDeoptimizationDataOffset - kHeapObjectTag));
// Read the OSR entry point offset out of the deoptimization data.
1545 FieldMemOperand(x1, FixedArray::OffsetOfElementAt(
1546 DeoptimizationData::kOsrPcOffsetIndex)));
// Compute the absolute entry address into lr: code start plus OSR offset.
1551 __ Add(lr, x0, Code::kHeaderSize - kHeapObjectTag);
// Implements Function.prototype.apply: rearranges the stack so that the
// receiver becomes this_arg and the call proceeds either through
// CallWithArrayLike (when an arguments list is present) or plain Call
// (when arg_array is null/undefined).
// NOTE(review): argc is referenced below but its declaration line is elided
// in this excerpt — presumably the JS argument-count register; confirm
// against the full source.
1558 void Builtins::Generate_FunctionPrototypeApply(MacroAssembler* masm) {
1565 ASM_LOCATION(
"Builtins::Generate_FunctionPrototypeApply");
1568 Register arg_array = x2;
1569 Register receiver = x1;
1570 Register this_arg = x0;
1571 Register undefined_value = x3;
1572 Register null_value = x4;
1574 __ LoadRoot(undefined_value, RootIndex::kUndefinedValue);
1575 __ LoadRoot(null_value, RootIndex::kNullValue);
1581 Register saved_argc = x10;
1582 Register scratch = x11;
// Push two undefineds so that this_arg/arg_array default to undefined when
// fewer than two arguments were passed.
1586 __ Push(undefined_value, undefined_value);
1600 __ SlotAddress(scratch, argc);
// argc is clobbered below, so save it for the later DropArguments.
1602 __ Mov(saved_argc, argc);
1603 __ Ldp(arg_array, this_arg, MemOperand(scratch));
1604 __ Ldr(receiver, MemOperand(scratch, 2 * kPointerSize));
// Drop all arguments (including the defaults pushed above) from the stack.
1607 __ DropArguments(saved_argc, TurboAssembler::kCountExcludesReceiver);
// this_arg becomes the receiver of the actual call.
1609 __ PushArgument(this_arg);
// If arg_array is null or undefined, the call takes no arguments: compare
// against null, then conditionally (only if not equal) against undefined.
1624 __ Cmp(arg_array, null_value);
1625 __ Ccmp(arg_array, undefined_value, ZFlag, ne);
1626 __ B(eq, &no_arguments);
1629 __ Jump(BUILTIN_CODE(masm->isolate(), CallWithArrayLike),
1630 RelocInfo::CODE_TARGET);
// No arguments case: call the target (in x1) with zero arguments.
1634 __ Bind(&no_arguments);
1637 DCHECK(receiver.Is(x1));
1638 __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
// Implements Function.prototype.call: removes the function itself from the
// argument list (shifting the remaining arguments down one slot) and then
// tail-calls the generic Call builtin.
// NOTE(review): argc is referenced but its declaration line is elided in
// this excerpt; confirm against the full source.
1643 void Builtins::Generate_FunctionPrototypeCall(MacroAssembler* masm) {
1645 Register
function = x1;
1647 ASM_LOCATION(
"Builtins::Generate_FunctionPrototypeCall");
// Load the actual callee (the receiver of .call) from the stack.
1650 __ Peek(
function, Operand(argc, LSL, kXRegSizeLog2));
1655 Register scratch = x10;
1656 __ Cbnz(argc, &non_zero);
// Zero arguments: the receiver slot becomes undefined and we call directly.
1657 __ LoadRoot(scratch, RootIndex::kUndefinedValue);
1660 __ Poke(scratch, 0);
1662 __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
// Overwrite the function's own stack slot with padding before shuffling.
1668 Label arguments_ready;
1669 __ Poke(padreg, Operand(argc, LSL, kXRegSizeLog2));
// Odd argc: the slots are already aligned for the shifted arguments.
1670 __ Tbnz(argc, 0, &arguments_ready);
// Even argc: copy all arguments up two slots (kSrcLessThanDst direction) to
// restore stack alignment after dropping the function slot.
1677 Register copy_from = x10;
1678 Register copy_to = x11;
1679 Register count = x12;
1680 Register last_arg_slot = x13;
1681 __ Mov(count, argc);
1682 __ Sub(last_arg_slot, argc, 1);
1683 __ SlotAddress(copy_from, last_arg_slot);
1684 __ Add(copy_to, copy_from, 2 * kPointerSize);
1685 __ CopyDoubleWords(copy_to, copy_from, count,
1686 TurboAssembler::kSrcLessThanDst);
// One fewer argument after removing the function itself; tail-call Call.
1693 __ Bind(&arguments_ready);
1694 __ Sub(argc, argc, 1);
1695 __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
// Implements Reflect.apply(target, thisArgument, argumentsList): extracts
// the three arguments from the stack (defaulting missing ones to undefined),
// pushes thisArgument as the receiver, and tail-calls CallWithArrayLike.
// NOTE(review): argc is referenced but its declaration line is elided in
// this excerpt; confirm against the full source.
1698 void Builtins::Generate_ReflectApply(MacroAssembler* masm) {
1706 ASM_LOCATION(
"Builtins::Generate_ReflectApply");
1709 Register arguments_list = x2;
1710 Register target = x1;
1711 Register this_argument = x4;
1712 Register undefined_value = x3;
1714 __ LoadRoot(undefined_value, RootIndex::kUndefinedValue);
// Push four undefineds so all three parameters default to undefined when
// fewer arguments were passed (the fourth keeps the stack aligned).
1723 __ Push(undefined_value, undefined_value, undefined_value, undefined_value);
1748 Register scratch = x10;
1749 __ SlotAddress(scratch, argc);
// Load argumentsList/thisArgument as a pair, then target one slot higher.
1750 __ Ldp(arguments_list, this_argument,
1751 MemOperand(scratch, 1 * kPointerSize));
1752 __ Ldr(target, MemOperand(scratch, 3 * kPointerSize));
1755 __ DropArguments(argc, TurboAssembler::kCountExcludesReceiver);
// thisArgument becomes the receiver of the actual call.
1757 __ PushArgument(this_argument);
1771 __ Jump(BUILTIN_CODE(masm->isolate(), CallWithArrayLike),
1772 RelocInfo::CODE_TARGET);
// Implements Reflect.construct(target, argumentsList[, newTarget]): extracts
// the arguments (defaulting missing ones to undefined, and newTarget to
// target), pushes undefined as the receiver, and tail-calls
// ConstructWithArrayLike.
// NOTE(review): argc is referenced but its declaration line is elided in
// this excerpt; confirm against the full source.
1775 void Builtins::Generate_ReflectConstruct(MacroAssembler* masm) {
1783 ASM_LOCATION(
"Builtins::Generate_ReflectConstruct");
1786 Register arguments_list = x2;
1787 Register target = x1;
1788 Register new_target = x3;
1789 Register undefined_value = x4;
1791 __ LoadRoot(undefined_value, RootIndex::kUndefinedValue);
// Push four undefineds so the parameters default to undefined when fewer
// arguments were passed (the fourth keeps the stack aligned).
1801 __ Push(undefined_value, undefined_value, undefined_value, undefined_value);
1826 Register scratch = x10;
1827 __ SlotAddress(scratch, argc);
1828 __ Ldp(new_target, arguments_list, MemOperand(scratch, 1 * kPointerSize));
1829 __ Ldr(target, MemOperand(scratch, 3 * kPointerSize));
// When newTarget was not explicitly supplied, fall back to target (the
// condition flags come from a comparison elided in this excerpt).
1832 __ CmovX(new_target, target, ls);
1835 __ DropArguments(argc, TurboAssembler::kCountExcludesReceiver);
// The receiver of a [[Construct]] invocation is undefined.
1838 __ PushArgument(undefined_value);
1857 __ Jump(BUILTIN_CODE(masm->isolate(), ConstructWithArrayLike),
1858 RelocInfo::CODE_TARGET);
// Opens an arguments-adaptor frame: pushes the frame-type marker (paired
// with padding to keep alignment) and points fp at the fixed frame area.
1863 void EnterArgumentsAdaptorFrame(MacroAssembler* masm) {
1865 __ Mov(x11, StackFrame::TypeToMarker(StackFrame::ARGUMENTS_ADAPTOR));
1868 __ Push(x11, padreg);
1869 __ Add(fp, sp, ArgumentsAdaptorFrameConstants::kFixedFrameSizeFromFp);
// Tears down an arguments-adaptor frame: reads the actual argument count
// stored in the frame and drops that many arguments from the caller stack.
1872 void LeaveArgumentsAdaptorFrame(MacroAssembler* masm) {
1878 __ Ldr(x10, MemOperand(fp, ArgumentsAdaptorFrameConstants::kLengthOffset));
1884 __ DropArguments(x10, TurboAssembler::kCountExcludesReceiver);
// Prepares the stack for appending `len` varargs after the existing `argc`
// arguments: claims the needed (alignment-adjusted) slots and moves the
// existing arguments so the varargs region is contiguous. On exit argc has
// been updated to argc + len.
// NOTE(review): several statements (including the len parameter declaration
// and some label binds) are elided in this excerpt.
1890 void Generate_PrepareForCopyingVarargs(MacroAssembler* masm, Register argc,
1892 Label len_odd, exit;
1893 Register slots_to_copy = x10;
// +1 accounts for the receiver slot that moves along with the arguments.
1894 __ Add(slots_to_copy, argc, 1);
1895 __ Add(argc, argc, len);
1896 __ Tbnz(len, 0, &len_odd);
// Even len path: claim len+1 slots, minus an alignment correction derived
// from the parity of the new total argc.
1906 Register scratch = x11;
1907 Register slots_to_claim = x12;
1908 __ Add(slots_to_claim, len, 1);
1909 __ And(scratch, argc, 1);
1910 __ Sub(slots_to_claim, slots_to_claim, Operand(scratch, LSL, 1));
1911 __ Claim(slots_to_claim);
// Choose copy direction based on the parity of the slot count.
1915 __ Tbz(slots_to_copy, 0, &copy_down);
// Copy-up path: move existing args to higher addresses (kSrcLessThanDst).
1921 Register scratch = x13;
1922 __ Sub(scratch, argc, 1);
1923 __ SlotAddress(src, scratch);
1924 __ SlotAddress(dst, argc);
1925 __ CopyDoubleWords(dst, src, slots_to_copy,
1926 TurboAssembler::kSrcLessThanDst);
// Copy-down path: move existing args toward the stack pointer and pad the
// now-unused top slot.
1931 __ Bind(&copy_down);
1935 Register scratch = x13;
1936 __ Add(src, len, 1);
1938 __ CopySlots(dst, src, slots_to_copy);
1939 __ Add(scratch, argc, 1);
1940 __ Poke(padreg, Operand(scratch, LSL, kPointerSizeLog2));
// Spreads the elements of a FixedArray (x2) onto the stack as call
// arguments, replacing holes with undefined, then tail-calls `code`
// (a Call or Construct builtin).
// NOTE(review): the declarations of len/src and some label binds are elided
// in this excerpt.
1949 void Builtins::Generate_CallOrConstructVarargs(MacroAssembler* masm,
1950 Handle<Code> code) {
// Debug-only check that x2 really is a FixedArray (or FixedDoubleArray).
1958 if (masm->emit_debug_code()) {
1961 __ AssertNotSmi(x2, AbortReason::kOperandIsNotAFixedArray);
1962 __ Ldr(x10, FieldMemOperand(x2, HeapObject::kMapOffset));
1963 __ Ldrh(x13, FieldMemOperand(x10, Map::kInstanceTypeOffset));
1964 __ Cmp(x13, FIXED_ARRAY_TYPE);
1966 __ Cmp(x13, FIXED_DOUBLE_ARRAY_TYPE);
1972 __ Abort(AbortReason::kOperandIsNotAFixedArray);
1977 Register arguments_list = x2;
// Bail out to the runtime if pushing `len` more slots would overflow.
1981 Label stack_overflow;
1982 Generate_StackOverflowCheck(masm, len, &stack_overflow);
// Make room on the stack and shift existing arguments as needed.
1988 Generate_PrepareForCopyingVarargs(masm, argc, len);
// Copy the array elements into the claimed slots, converting the-hole
// entries into undefined as required for spread semantics.
1994 Register the_hole_value = x11;
1995 Register undefined_value = x12;
1996 Register scratch = x13;
1997 __ Add(src, arguments_list, FixedArray::kHeaderSize - kHeapObjectTag);
1998 __ LoadRoot(the_hole_value, RootIndex::kTheHoleValue);
1999 __ LoadRoot(undefined_value, RootIndex::kUndefinedValue);
// Loop from the last element down to index 0 (len is decremented first and
// used as the destination slot index).
2004 __ Sub(len, len, 1);
2005 __ Ldr(scratch, MemOperand(src, kPointerSize, PostIndex));
2006 __ Cmp(scratch, the_hole_value);
2007 __ Csel(scratch, scratch, undefined_value, ne);
2008 __ Poke(scratch, Operand(len, LSL, kPointerSizeLog2));
2009 __ Cbnz(len, &loop);
2014 __ Jump(code, RelocInfo::CODE_TARGET);
2016 __ bind(&stack_overflow);
2017 __ TailCallRuntime(Runtime::kThrowStackOverflow);
// Forwards the caller's own (rest) arguments, starting at start_index, to a
// new call or construct invocation of `code`. Handles both ordinary JS
// frames and arguments-adaptor frames as the source of the arguments.
// NOTE(review): the declarations of len/dst and some statements are elided
// in this excerpt.
2021 void Builtins::Generate_CallOrConstructForwardVarargs(MacroAssembler* masm,
2022 CallOrConstructMode mode,
2023 Handle<Code> code) {
2032 Register start_index = x2;
// For constructs, new.target (x3) must itself be a constructor; throw
// otherwise.
2035 if (mode == CallOrConstructMode::kConstruct) {
2036 Label new_target_constructor, new_target_not_constructor;
2037 __ JumpIfSmi(x3, &new_target_not_constructor);
2038 __ Ldr(x5, FieldMemOperand(x3, HeapObject::kMapOffset));
2039 __ Ldrb(x5, FieldMemOperand(x5, Map::kBitFieldOffset));
2040 __ TestAndBranchIfAnySet(x5, Map::IsConstructorBit::kMask,
2041 &new_target_constructor);
2042 __ Bind(&new_target_not_constructor);
2044 FrameScope scope(masm, StackFrame::MANUAL);
2045 __ EnterFrame(StackFrame::INTERNAL);
2046 __ PushArgument(x3);
2047 __ CallRuntime(Runtime::kThrowNotConstructor);
2049 __ Bind(&new_target_constructor);
// Determine where the source arguments live: directly in the caller's
// frame, or in an arguments-adaptor frame above it.
2056 Register args_fp = x5;
2059 Label arguments_adaptor, arguments_done;
2060 Register scratch = x10;
2061 __ Ldr(args_fp, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
2062 __ Ldr(x4, MemOperand(args_fp,
2063 CommonFrameConstants::kContextOrFrameTypeOffset));
2064 __ Cmp(x4, StackFrame::TypeToMarker(StackFrame::ARGUMENTS_ADAPTOR));
2065 __ B(eq, &arguments_adaptor);
// Plain JS frame: the argument count is the callee's formal parameter
// count, read from its SharedFunctionInfo.
2068 MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
2070 FieldMemOperand(scratch, JSFunction::kSharedFunctionInfoOffset));
2072 FieldMemOperand(scratch,
2073 SharedFunctionInfo::kFormalParameterCountOffset));
2074 __ Mov(args_fp, fp);
2076 __ B(&arguments_done);
// Adaptor frame: the actual argument count is stored in the frame itself.
2077 __ Bind(&arguments_adaptor);
2082 MemOperand(args_fp, ArgumentsAdaptorFrameConstants::kLengthOffset));
2084 __ Bind(&arguments_done);
// len -= start_index; nothing to forward when the result is <= 0.
2087 Label stack_done, stack_overflow;
2088 __ Subs(len, len, start_index);
2089 __ B(le, &stack_done);
2091 Generate_StackOverflowCheck(masm, x6, &stack_overflow);
2093 Generate_PrepareForCopyingVarargs(masm, argc, len);
// Copy the forwarded arguments from the source frame (skipping the two
// fixed slots above its fp) into the freshly claimed stack slots.
2098 __ Add(args_fp, args_fp, 2 * kPointerSize);
2099 __ SlotAddress(dst, 0);
2100 __ CopyDoubleWords(dst, args_fp, len);
2104 __ Bind(&stack_overflow);
2105 __ TailCallRuntime(Runtime::kThrowStackOverflow);
2106 __ Bind(&stack_done);
2108 __ Jump(code, RelocInfo::CODE_TARGET);
// Calls a JSFunction (x1) with x0 arguments, converting the receiver to an
// object when required by the function's language mode, then dispatches via
// InvokeFunctionCode. Class constructors cannot be called and throw.
// NOTE(review): some statements and label binds are elided in this excerpt.
2112 void Builtins::Generate_CallFunction(MacroAssembler* masm,
2113 ConvertReceiverMode mode) {
2114 ASM_LOCATION(
"Builtins::Generate_CallFunction");
2119 __ AssertFunction(x1);
// Class constructors are [[Call]]-incompatible; detect via the flags in the
// function's SharedFunctionInfo and branch to the throwing path.
2123 Label class_constructor;
2124 __ Ldr(x2, FieldMemOperand(x1, JSFunction::kSharedFunctionInfoOffset));
2125 __ Ldr(w3, FieldMemOperand(x2, SharedFunctionInfo::kFlagsOffset));
2126 __ TestAndBranchIfAnySet(w3, SharedFunctionInfo::IsClassConstructorBit::kMask,
2127 &class_constructor);
// Enter the function's own context.
2132 __ Ldr(cp, FieldMemOperand(x1, JSFunction::kContextOffset));
// Native and strict-mode functions take the receiver as-is; only sloppy
// functions need receiver conversion below.
2135 __ TestAndBranchIfAnySet(w3,
2136 SharedFunctionInfo::IsNativeBit::kMask |
2137 SharedFunctionInfo::IsStrictBit::kMask,
// Known null/undefined receiver: substitute the global proxy directly.
2147 if (mode == ConvertReceiverMode::kNullOrUndefined) {
2149 __ LoadGlobalProxy(x3);
2151 Label convert_to_object, convert_receiver;
// Peek the receiver from the stack (slot argc).
2152 __ Peek(x3, Operand(x0, LSL, kXRegSizeLog2));
2153 __ JumpIfSmi(x3, &convert_to_object);
2154 STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
2155 __ CompareObjectType(x3, x4, x4, FIRST_JS_RECEIVER_TYPE);
2156 __ B(hs, &done_convert);
2157 if (mode != ConvertReceiverMode::kNotNullOrUndefined) {
2158 Label convert_global_proxy;
2159 __ JumpIfRoot(x3, RootIndex::kUndefinedValue, &convert_global_proxy);
2160 __ JumpIfNotRoot(x3, RootIndex::kNullValue, &convert_to_object);
2161 __ Bind(&convert_global_proxy);
2164 __ LoadGlobalProxy(x3);
2166 __ B(&convert_receiver);
// Slow path: call the ToObject builtin, preserving argc/function/context
// around the call (padreg keeps the pushes 16-byte aligned).
2168 __ Bind(&convert_to_object);
2173 FrameScope scope(masm, StackFrame::INTERNAL);
2175 __ Push(padreg, x0, x1, cp);
2177 __ Call(BUILTIN_CODE(masm->isolate(), ToObject),
2178 RelocInfo::CODE_TARGET);
2180 __ Pop(cp, x1, x0, padreg);
// Reload the SharedFunctionInfo; x2 was clobbered across the call.
2183 __ Ldr(x2, FieldMemOperand(x1, JSFunction::kSharedFunctionInfoOffset));
2184 __ Bind(&convert_receiver);
// Write the converted receiver back into its stack slot.
2186 __ Poke(x3, Operand(x0, LSL, kXRegSizeLog2));
2188 __ Bind(&done_convert);
// Invoke with expected count from the SharedFunctionInfo and actual count
// from x0; JUMP_FUNCTION makes this a tail call.
2198 FieldMemOperand(x2, SharedFunctionInfo::kFormalParameterCountOffset));
2199 ParameterCount actual(x0);
2200 ParameterCount expected(x2);
2201 __ InvokeFunctionCode(x1, no_reg, expected, actual, JUMP_FUNCTION);
// Class constructors throw a TypeError when called.
2204 __ Bind(&class_constructor);
2206 FrameScope frame(masm, StackFrame::INTERNAL);
2207 __ PushArgument(x1);
2208 __ CallRuntime(Runtime::kThrowConstructorNonCallableError);
// Inserts a bound function's [[BoundArguments]] (from the JSBoundFunction in
// x1) between the receiver and the existing arguments on the stack, keeping
// 16-byte stack alignment via padreg slots.
// NOTE(review): argc is referenced but its declaration line is elided in
// this excerpt; some statements and label binds are also elided.
2214 void Generate_PushBoundArguments(MacroAssembler* masm) {
2221 Register bound_argc = x4;
2222 Register bound_argv = x2;
// Fast exit when there are no bound arguments.
2225 Label no_bound_arguments;
2227 FieldMemOperand(x1, JSBoundFunction::kBoundArgumentsOffset));
2228 __ SmiUntag(bound_argc,
2229 FieldMemOperand(bound_argv, FixedArray::kLengthOffset));
2230 __ Cbz(bound_argc, &no_bound_arguments);
// Stack-overflow check: compare free space (sp - real stack limit) against
// the space the bound arguments will consume.
2248 __ LoadRoot(x10, RootIndex::kRealStackLimit);
2251 __ Sub(x10, sp, x10);
2253 __ Cmp(x10, Operand(bound_argc, LSL, kPointerSizeLog2));
2255 __ TailCallRuntime(Runtime::kThrowStackOverflow);
2260 Label copy_args, copy_bound_args;
2261 Register total_argc = x15;
2262 Register slots_to_claim = x12;
2263 __ Add(total_argc, argc, bound_argc);
2264 __ Mov(slots_to_claim, bound_argc);
2265 __ Tbz(bound_argc, 0, &copy_args);
// Odd bound_argc: the receiver is relocated as part of re-aligning, so load
// it now before the slots are shuffled.
2269 Register receiver = x14;
2270 __ Peek(receiver, Operand(argc, LSL, kPointerSizeLog2));
// Claim bound_argc + 1 slots, minus an alignment correction based on the
// parity of the new total argument count.
2277 Register scratch = x11;
2278 __ Add(slots_to_claim, bound_argc, 1);
2279 __ And(scratch, total_argc, 1);
2280 __ Sub(slots_to_claim, slots_to_claim, Operand(scratch, LSL, 1));
2284 __ Bind(&copy_args);
// Nothing to claim means the existing args already sit where they belong.
2288 __ Cbz(slots_to_claim, &copy_bound_args);
2289 __ Claim(slots_to_claim);
// Move the existing arguments down into the newly claimed slots.
2291 Register count = x10;
2293 __ Mov(count, argc);
2294 __ CopySlots(0, slots_to_claim, count);
2296 __ Bind(&copy_bound_args);
// Copy the bound arguments out of the FixedArray, iterating from the last
// element (counter is a byte offset, decremented by kPointerSize).
2301 Register counter = x10;
2302 Register scratch = x11;
2303 Register copy_to = x12;
2304 __ Add(bound_argv, bound_argv,
2305 FixedArray::kHeaderSize - kHeapObjectTag);
2306 __ SlotAddress(copy_to, argc);
2309 __ Lsl(counter, bound_argc, kPointerSizeLog2);
2311 __ Sub(counter, counter, kPointerSize);
2312 __ Ldr(scratch, MemOperand(bound_argv, counter));
2314 __ Str(scratch, MemOperand(copy_to, kPointerSize, PostIndex));
2315 __ Cbnz(counter, &loop);
// Odd bound_argc: store the saved receiver above the arguments, and add a
// padding slot if the new total count is even.
2320 Register scratch = x10;
2321 __ Tbz(bound_argc, 0, &done);
2323 __ Add(scratch, sp, Operand(total_argc, LSL, kPointerSizeLog2));
2324 __ Str(receiver, MemOperand(scratch, kPointerSize, PostIndex));
2325 __ Tbnz(total_argc, 0, &done);
2327 __ Str(padreg, MemOperand(scratch));
2332 __ Bind(&no_bound_arguments);
// Calls a JSBoundFunction (x1): patches the receiver slot with the bound
// this value, pushes the bound arguments, then tail-calls the generic Call
// builtin on the bound target function.
2338 void Builtins::Generate_CallBoundFunctionImpl(MacroAssembler* masm) {
2343 __ AssertBoundFunction(x1);
// Replace the receiver on the stack with [[BoundThis]].
2346 __ Ldr(x10, FieldMemOperand(x1, JSBoundFunction::kBoundThisOffset));
2347 __ Poke(x10, Operand(x0, LSL, kPointerSizeLog2));
2350 Generate_PushBoundArguments(masm);
// Continue as a plain call on the bound target function.
2353 __ Ldr(x1, FieldMemOperand(x1, JSBoundFunction::kBoundTargetFunctionOffset));
2354 __ Jump(BUILTIN_CODE(masm->isolate(), Call_ReceiverIsAny),
2355 RelocInfo::CODE_TARGET);
// Generic Call dispatch on the target in x1: JSFunction -> CallFunction,
// JSBoundFunction -> CallBoundFunction, JSProxy -> CallProxy, other callable
// objects -> call-as-function delegate, everything else -> TypeError.
2359 void Builtins::Generate_Call(MacroAssembler* masm, ConvertReceiverMode mode) {
2365 Label non_callable, non_function, non_smi;
2366 __ JumpIfSmi(x1, &non_callable);
// Instance type ends up in x5 and is reused by the comparisons below.
2368 __ CompareObjectType(x1, x4, x5, JS_FUNCTION_TYPE);
2369 __ Jump(masm->isolate()->builtins()->CallFunction(mode),
2370 RelocInfo::CODE_TARGET, eq);
2371 __ Cmp(x5, JS_BOUND_FUNCTION_TYPE);
2372 __ Jump(BUILTIN_CODE(masm->isolate(), CallBoundFunction),
2373 RelocInfo::CODE_TARGET, eq);
// Anything whose map lacks the callable bit cannot be called at all.
2376 __ Ldrb(x4, FieldMemOperand(x4, Map::kBitFieldOffset));
2377 __ TestAndBranchIfAllClear(x4, Map::IsCallableBit::kMask, &non_callable);
2380 __ Cmp(x5, JS_PROXY_TYPE);
2381 __ B(ne, &non_function);
2382 __ Jump(BUILTIN_CODE(masm->isolate(), CallProxy), RelocInfo::CODE_TARGET);
// Callable non-function object: make it the receiver and invoke the
// CALL_AS_FUNCTION delegate with the receiver left untouched.
2386 __ Bind(&non_function);
2388 __ Poke(x1, Operand(x0, LSL, kXRegSizeLog2));
2390 __ LoadNativeContextSlot(Context::CALL_AS_FUNCTION_DELEGATE_INDEX, x1);
2391 __ Jump(masm->isolate()->builtins()->CallFunction(
2392 ConvertReceiverMode::kNotNullOrUndefined),
2393 RelocInfo::CODE_TARGET);
// Non-callable target: throw a TypeError via the runtime.
2396 __ bind(&non_callable);
2398 FrameScope scope(masm, StackFrame::INTERNAL);
2399 __ PushArgument(x1);
2400 __ CallRuntime(Runtime::kThrowCalledNonCallable);
// Constructs an instance of the JSFunction in x1: dispatches to either the
// builtin construct stub or the generic construct stub, based on the
// ConstructAsBuiltin flag in the function's SharedFunctionInfo.
2405 void Builtins::Generate_ConstructFunction(MacroAssembler* masm) {
2411 __ AssertConstructor(x1);
2412 __ AssertFunction(x1);
// x2 (allocation site / feedback slot) is unused here: pass undefined.
2416 __ LoadRoot(x2, RootIndex::kUndefinedValue);
2418 Label call_generic_stub;
2421 __ Ldr(x4, FieldMemOperand(x1, JSFunction::kSharedFunctionInfoOffset));
2422 __ Ldr(w4, FieldMemOperand(x4, SharedFunctionInfo::kFlagsOffset));
2423 __ TestAndBranchIfAllClear(
2424 w4, SharedFunctionInfo::ConstructAsBuiltinBit::kMask, &call_generic_stub);
2426 __ Jump(BUILTIN_CODE(masm->isolate(), JSBuiltinsConstructStub),
2427 RelocInfo::CODE_TARGET);
2429 __ bind(&call_generic_stub);
2430 __ Jump(BUILTIN_CODE(masm->isolate(), JSConstructStubGeneric),
2431 RelocInfo::CODE_TARGET);
// Constructs via a JSBoundFunction (x1): pushes the bound arguments, then
// tail-calls the generic Construct builtin on the bound target function.
// NOTE(review): the new.target fix-up around kBoundTargetFunctionOffset at
// 2453 is partially elided in this excerpt.
2435 void Builtins::Generate_ConstructBoundFunction(MacroAssembler* masm) {
2441 __ AssertConstructor(x1);
2442 __ AssertBoundFunction(x1);
2445 Generate_PushBoundArguments(masm);
2453 FieldMemOperand(x1, JSBoundFunction::kBoundTargetFunctionOffset));
// Continue constructing on the bound target function.
2458 __ Ldr(x1, FieldMemOperand(x1, JSBoundFunction::kBoundTargetFunctionOffset));
2459 __ Jump(BUILTIN_CODE(masm->isolate(), Construct), RelocInfo::CODE_TARGET);
// Generic Construct dispatch on the target in x1: JSFunction ->
// ConstructFunction, JSBoundFunction -> ConstructBoundFunction, JSProxy ->
// ConstructProxy, other constructors -> construct-as-constructor delegate,
// non-constructors -> ConstructedNonConstructable.
2463 void Builtins::Generate_Construct(MacroAssembler* masm) {
2472 Label non_constructor, non_proxy;
2473 __ JumpIfSmi(x1, &non_constructor);
// Anything whose map lacks the constructor bit cannot be new'ed.
2476 __ Ldr(x4, FieldMemOperand(x1, HeapObject::kMapOffset));
2477 __ Ldrb(x2, FieldMemOperand(x4, Map::kBitFieldOffset));
2478 __ TestAndBranchIfAllClear(x2, Map::IsConstructorBit::kMask,
// Instance type ends up in x5 and is reused by the comparisons below.
2482 __ CompareInstanceType(x4, x5, JS_FUNCTION_TYPE);
2483 __ Jump(BUILTIN_CODE(masm->isolate(), ConstructFunction),
2484 RelocInfo::CODE_TARGET, eq);
2488 __ Cmp(x5, JS_BOUND_FUNCTION_TYPE);
2489 __ Jump(BUILTIN_CODE(masm->isolate(), ConstructBoundFunction),
2490 RelocInfo::CODE_TARGET, eq);
2493 __ Cmp(x5, JS_PROXY_TYPE);
2494 __ B(ne, &non_proxy);
2495 __ Jump(BUILTIN_CODE(masm->isolate(), ConstructProxy),
2496 RelocInfo::CODE_TARGET);
// Constructable non-function object: make it the receiver and invoke the
// CALL_AS_CONSTRUCTOR delegate.
2499 __ bind(&non_proxy);
2502 __ Poke(x1, Operand(x0, LSL, kXRegSizeLog2));
2504 __ LoadNativeContextSlot(Context::CALL_AS_CONSTRUCTOR_DELEGATE_INDEX, x1);
2505 __ Jump(masm->isolate()->builtins()->CallFunction(),
2506 RelocInfo::CODE_TARGET);
// Non-constructor target: throw via the dedicated builtin.
2511 __ bind(&non_constructor);
2512 __ Jump(BUILTIN_CODE(masm->isolate(), ConstructedNonConstructable),
2513 RelocInfo::CODE_TARGET);
// Adapts a call whose actual argument count (x0) differs from the callee's
// expected count (x2): builds an adaptor frame, copies/pads arguments so the
// callee sees exactly argc_expected of them, invokes the callee's code, and
// tears the adaptor frame down on return. If the callee opted out via
// kDontAdaptArgumentsSentinel, the call proceeds unadapted.
// NOTE(review): some statements and label binds are elided in this excerpt.
2516 void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
2517 ASM_LOCATION(
"Builtins::Generate_ArgumentsAdaptorTrampoline");
2576 Register argc_actual = x0;
2577 Register argc_expected = x2;
2578 Register
function = x1;
2580 Label dont_adapt_arguments, stack_overflow;
2582 Label enough_arguments;
// Sentinel means the callee tolerates any argument count: skip adaptation.
2583 __ Cmp(argc_expected, SharedFunctionInfo::kDontAdaptArgumentsSentinel);
2584 __ B(eq, &dont_adapt_arguments);
2586 EnterArgumentsAdaptorFrame(masm);
2588 Register copy_from = x10;
2589 Register copy_end = x11;
2590 Register copy_to = x12;
2591 Register argc_to_copy = x13;
2592 Register argc_unused_actual = x14;
2593 Register scratch1 = x15, scratch2 = x16;
2597 __ RecordComment(
"-- Stack check --");
// +1 for the receiver slot.
2598 __ Add(scratch1, argc_expected, 1);
2599 Generate_StackOverflowCheck(masm, scratch1, &stack_overflow);
2602 __ RecordComment(
"-- Allocate callee frame slots --");
// Round the claimed slot count up to an even number for stack alignment.
2603 __ Add(scratch1, scratch1, 1);
2604 __ Bic(scratch1, scratch1, 1);
2605 __ Claim(scratch1, kPointerSize);
2607 __ Mov(copy_to, sp);
// Partition: argc_to_copy = min(actual, expected); argc_unused_actual =
// max(actual - expected, 0). Branch if no undefined-padding is needed.
2615 __ Subs(scratch1, argc_actual, argc_expected);
2616 __ Csel(argc_unused_actual, xzr, scratch1, lt);
2617 __ Csel(argc_to_copy, argc_expected, argc_actual, ge);
2618 __ B(ge, &enough_arguments);
2621 __ RecordComment(
"-- Fill slots with undefined --");
// Fill the missing-arguments region with undefined, two slots at a time
// (the claimed area is even-sized, so pairwise stores are safe).
2622 __ Sub(copy_end, copy_to, Operand(scratch1, LSL, kPointerSizeLog2));
2623 __ LoadRoot(scratch1, RootIndex::kUndefinedValue);
2627 __ Stp(scratch1, scratch1, MemOperand(copy_to, 2 * kPointerSize, PostIndex));
2630 __ Cmp(copy_end, copy_to);
2634 __ Mov(copy_to, copy_end);
2636 __ Bind(&enough_arguments);
2639 __ RecordComment(
"-- Copy actual arguments --");
2640 __ Cbz(argc_to_copy, &skip_copy);
2641 __ Add(copy_end, copy_to, Operand(argc_to_copy, LSL, kPointerSizeLog2));
// Source: the caller's arguments, 2 fixed slots above the adaptor fp, plus
// any surplus actual arguments that the callee will never see.
2642 __ Add(copy_from, fp, 2 * kPointerSize);
2644 __ Add(copy_from, copy_from,
2645 Operand(argc_unused_actual, LSL, kPointerSizeLog2));
// Pairwise copy loop; the even-sized claim makes over-copying by one safe.
2651 __ Bind(&copy_2_by_2);
2652 __ Ldp(scratch1, scratch2,
2653 MemOperand(copy_from, 2 * kPointerSize, PostIndex));
2654 __ Stp(scratch1, scratch2, MemOperand(copy_to, 2 * kPointerSize, PostIndex));
2655 __ Cmp(copy_end, copy_to);
2656 __ B(hi, &copy_2_by_2);
2657 __ Bind(&skip_copy);
2661 __ RecordComment(
"-- Store padding --");
2662 __ Str(padreg, MemOperand(fp, -5 * kPointerSize));
2668 __ RecordComment(
"-- Store receiver --");
// Copy the caller's receiver into the callee-visible receiver slot.
2669 __ Add(copy_from, fp, 2 * kPointerSize);
2670 __ Ldr(scratch1, MemOperand(copy_from, argc_actual, LSL, kPointerSizeLog2));
2671 __ Str(scratch1, MemOperand(sp, argc_expected, LSL, kPointerSizeLog2));
2674 __ RecordComment(
"-- Call entry point --");
// The callee now sees exactly argc_expected arguments.
2675 __ Mov(argc_actual, argc_expected);
2679 static_assert(kJavaScriptCallCodeStartRegister == x2,
"ABI mismatch");
2680 __ Ldr(x2, FieldMemOperand(
function, JSFunction::kCodeOffset));
2681 __ Add(x2, x2, Operand(Code::kHeaderSize - kHeapObjectTag));
// Record the return address for the deoptimizer.
2685 masm->isolate()->heap()->SetArgumentsAdaptorDeoptPCOffset(masm->pc_offset());
2688 LeaveArgumentsAdaptorFrame(masm);
2692 __ RecordComment(
"-- Call without adapting args --");
// Fast path: jump straight into the callee's code, arguments untouched.
2693 __ Bind(&dont_adapt_arguments);
2694 static_assert(kJavaScriptCallCodeStartRegister == x2,
"ABI mismatch");
2695 __ Ldr(x2, FieldMemOperand(
function, JSFunction::kCodeOffset));
2696 __ Add(x2, x2, Operand(Code::kHeaderSize - kHeapObjectTag));
2699 __ Bind(&stack_overflow);
2700 __ RecordComment(
"-- Stack overflow --");
2702 FrameScope frame(masm, StackFrame::MANUAL);
2703 __ CallRuntime(Runtime::kThrowStackOverflow);
// Lazily compiles a Wasm function: saves all wasm-ABI parameter registers,
// calls Runtime::kWasmCompileLazy with the instance and (Smi-tagged)
// function index, restores the registers, and resumes at the compiled code.
2708 void Builtins::Generate_WasmCompileLazy(MacroAssembler* masm) {
// The function index arrives as a 32-bit value; sign-extend and Smi-tag it
// for the runtime call.
2711 __ sxtw(kWasmCompileLazyFuncIndexRegister,
2712 kWasmCompileLazyFuncIndexRegister.W());
2713 __ SmiTag(kWasmCompileLazyFuncIndexRegister,
2714 kWasmCompileLazyFuncIndexRegister);
2716 HardAbortScope hard_abort(masm);
2717 FrameScope scope(masm, StackFrame::WASM_COMPILE_LAZY);
// Save all registers that may carry wasm parameters, so the runtime call
// cannot clobber them.
2722 constexpr RegList gp_regs =
2723 Register::ListOf<x0, x1, x2, x3, x4, x5, x6, x7>();
2724 constexpr RegList fp_regs =
2725 Register::ListOf<d0, d1, d2, d3, d4, d5, d6, d7>();
2726 __ PushXRegList(gp_regs);
2727 __ PushDRegList(fp_regs);
// Runtime arguments: instance and function index.
2731 __ Push(kWasmInstanceRegister, kWasmCompileLazyFuncIndexRegister);
// Use the CEntry stub stored on the instance for the runtime call.
2733 __ Ldr(x2, FieldMemOperand(kWasmInstanceRegister,
2734 WasmInstanceObject::kCEntryStubOffset));
// Wasm code has no JS context: initialize cp with a dummy Smi zero.
2737 __ Mov(cp, Smi::zero());
2738 __ CallRuntimeWithCEntry(Runtime::kWasmCompileLazy, x2);
// Keep the returned entry address in a register not restored below.
2740 __ mov(x8, kReturnRegister0);
2743 __ PopDRegList(fp_regs);
2744 __ PopXRegList(gp_regs);
// The CEntry stub: transitions from generated code into a C++ runtime
// function. Sets up an exit frame, calls the target C function, and either
// returns the result or unwinds to the pending exception handler.
// NOTE(review): several statements (the C call itself, some register
// set-up) are elided in this excerpt; comments cover the visible lines.
2750 void Builtins::Generate_CEntry(MacroAssembler* masm,
int result_size,
2751 SaveFPRegsMode save_doubles, ArgvMode argv_mode,
2752 bool builtin_exit_frame) {
2758 HardAbortScope hard_aborts(masm);
2760 ASM_LOCATION(
"CEntry::Generate entry");
// Register assignments: x0/x1 are the incoming argc/target; x21-x23 are
// callee-saved copies that survive the C call.
2779 const Register& argc_input = x0;
2780 const Register& target_input = x1;
2789 const Register& argv = x21;
2790 const Register& argc = x22;
2791 const Register& target = x23;
// When arguments live on the stack, compute argv = address of the last
// argument (slot argc - 1).
2796 Register temp_argv = x11;
2797 if (argv_mode == kArgvOnStack) {
2798 __ SlotAddress(temp_argv, x0);
2800 __ Sub(temp_argv, temp_argv, 1 * kPointerSize);
// Reserve three extra exit-frame slots for the saved argv/argc/target.
2804 int extra_stack_space = 3;
2806 FrameScope scope(masm, StackFrame::MANUAL);
2808 save_doubles == kSaveFPRegs, x10, extra_stack_space,
2809 builtin_exit_frame ? StackFrame::BUILTIN_EXIT : StackFrame::EXIT);
// Spill argv/argc/target into the exit frame so they can be re-read after
// the C call.
2812 __ Poke(argv, 1 * kPointerSize);
2813 __ Poke(argc, 2 * kPointerSize);
2814 __ Poke(target, 3 * kPointerSize);
2821 __ Mov(argc, argc_input);
2822 __ Mov(target, target_input);
2823 __ Mov(argv, temp_argv);
// Third C argument: the isolate address.
2862 __ Mov(x2, ExternalReference::isolate_address(masm->isolate()));
// Materialize the return address (x12) so it can be stored in the frame
// before the call below.
2864 Label return_location;
2865 __ Adr(x12, &return_location);
// Debug check: the return address recorded in the frame matches lr.
2868 if (__ emit_debug_code()) {
2871 UseScratchRegisterScope temps(masm);
2872 Register temp = temps.AcquireX();
2873 __ Ldr(temp, MemOperand(fp, ExitFrameConstants::kSPOffset));
2874 __ Ldr(temp, MemOperand(temp, -static_cast<int64_t>(kXRegSize)));
2876 __ Check(eq, AbortReason::kReturnAddressNotFoundInFrame);
2881 __ Bind(&return_location);
2890 const Register& result = x0;
// An exception sentinel in x0 means the runtime call threw.
2893 Label exception_returned;
2894 __ CompareRoot(result, RootIndex::kException);
2895 __ B(eq, &exception_returned);
// Normal return: restore the spilled values, leave the exit frame, and drop
// stack-passed arguments if we own them.
2902 __ Peek(argv, 1 * kPointerSize);
2903 __ Peek(argc, 2 * kPointerSize);
2904 __ Peek(target, 3 * kPointerSize);
2906 __ LeaveExitFrame(save_doubles == kSaveFPRegs, x10, x9);
2907 if (argv_mode == kArgvOnStack) {
2909 __ DropArguments(x11);
2911 __ AssertFPCRState();
// Exception path: ask the runtime to find the handler, then restore
// context/sp/fp from the isolate's pending-handler slots and jump there.
2915 __ Bind(&exception_returned);
2917 ExternalReference pending_handler_context_address = ExternalReference::Create(
2918 IsolateAddressId::kPendingHandlerContextAddress, masm->isolate());
2919 ExternalReference pending_handler_entrypoint_address =
2920 ExternalReference::Create(
2921 IsolateAddressId::kPendingHandlerEntrypointAddress, masm->isolate());
2922 ExternalReference pending_handler_fp_address = ExternalReference::Create(
2923 IsolateAddressId::kPendingHandlerFPAddress, masm->isolate());
2924 ExternalReference pending_handler_sp_address = ExternalReference::Create(
2925 IsolateAddressId::kPendingHandlerSPAddress, masm->isolate());
2929 ExternalReference find_handler =
2930 ExternalReference::Create(Runtime::kUnwindAndFindExceptionHandler);
2932 FrameScope scope(masm, StackFrame::MANUAL);
2935 __ Mov(x2, ExternalReference::isolate_address(masm->isolate()));
2936 __ CallCFunction(find_handler, 3);
2940 __ Mov(cp, pending_handler_context_address);
2941 __ Ldr(cp, MemOperand(cp));
2943 UseScratchRegisterScope temps(masm);
2944 Register scratch = temps.AcquireX();
2945 __ Mov(scratch, pending_handler_sp_address);
2946 __ Ldr(scratch, MemOperand(scratch));
2947 __ Mov(sp, scratch);
2949 __ Mov(fp, pending_handler_fp_address);
2950 __ Ldr(fp, MemOperand(fp));
// Only JS frames store the context; skip the store when cp is zero.
2955 __ Cbz(cp, &not_js_frame);
2956 __ Str(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
2957 __ Bind(&not_js_frame);
2963 __ ResetSpeculationPoisonRegister();
// Jump to the handler entry point.
2966 __ Mov(x10, pending_handler_entrypoint_address);
2967 __ Ldr(x10, MemOperand(x10));
// Truncates the double stack argument to a 32-bit integer (ECMA-262
// ToInt32 semantics), handling doubles whose exponent exceeds the fast
// TryConvertDoubleToInt64 path by shifting the mantissa manually.
2971 void Builtins::Generate_DoubleToI(MacroAssembler* masm) {
2973 Register result = x7;
2975 DCHECK(result.Is64Bits());
2977 HardAbortScope hard_abort(masm);
2978 UseScratchRegisterScope temps(masm);
2979 Register scratch1 = temps.AcquireX();
2980 Register scratch2 = temps.AcquireX();
2981 DoubleRegister double_scratch = temps.AcquireD();
// The argument sits above the two registers pushed below.
2984 const int kArgumentOffset = 2 * kPointerSize;
2986 __ Push(result, scratch1);
2987 __ Peek(double_scratch, kArgumentOffset);
// Fast path: in-range doubles convert directly.
2991 __ TryConvertDoubleToInt64(result, double_scratch, &done);
// Slow path: operate on the raw IEEE-754 bit pattern.
2992 __ Fmov(result, double_scratch);
2997 Register exponent = scratch1;
2998 __ Ubfx(exponent, result, HeapNumber::kMantissaBits,
2999 HeapNumber::kExponentBits);
// If the value is >= 2^(mantissa+32), the low 32 result bits are all zero.
3003 __ Cmp(exponent, HeapNumber::kExponentBias + HeapNumber::kMantissaBits + 32);
3004 __ CzeroX(result, ge);
// Debug-only: on this path the exponent must be large (the fast path
// already handled everything below 2^63).
3011 if (masm->emit_debug_code()) {
3012 __ Cmp(exponent, HeapNumber::kExponentBias + 63);
3014 __ Check(ge, AbortReason::kUnexpectedValue);
// Reconstruct the significand: mantissa bits plus the implicit leading 1.
3018 Register mantissa = scratch2;
3019 __ Ubfx(mantissa, result, 0, HeapNumber::kMantissaBits);
3020 __ Orr(mantissa, mantissa, 1ULL << HeapNumber::kMantissaBits);
// Apply the sign bit by negating the significand when set.
3023 __ Tst(result, kXSignMask);
3024 __ Cneg(mantissa, mantissa, ne);
// Shift the significand into place; the truncated low bits are the answer.
3028 __ Sub(exponent, exponent,
3029 HeapNumber::kExponentBias + HeapNumber::kMantissaBits);
3030 __ Lsl(result, mantissa, exponent);
// Store the result back into the argument slot and restore registers.
3033 __ Poke(result, kArgumentOffset);
3034 __ Pop(scratch1, result);
// Computes base^exponent (in d0/d1, result in d0). Integer exponents use
// an inline square-and-multiply loop; non-integer exponents (and results
// that need full precision, e.g. zero from the fast loop) fall back to the
// C library power function.
// NOTE(review): some label binds and statements are elided in this excerpt.
3038 void Builtins::Generate_MathPowInternal(MacroAssembler* masm) {
3039 Register exponent_integer = x12;
// lr must be preserved across CallCFunction; saved in a callee-saved reg.
3040 Register saved_lr = x19;
3041 VRegister result_double = d0;
3042 VRegister base_double = d0;
3043 VRegister exponent_double = d1;
3044 VRegister base_double_copy = d2;
3045 VRegister scratch1_double = d6;
3046 VRegister scratch0_double = d7;
3049 Label exponent_is_integer;
// If the exponent is representable as an int64, take the fast loop below.
3058 __ TryRepresentDoubleAsInt64(exponent_integer, exponent_double,
3059 scratch0_double, &exponent_is_integer);
// Non-integer exponent: delegate to the C library pow.
3062 AllowExternalCallThatCantCauseGC scope(masm);
3063 __ Mov(saved_lr, lr);
3064 __ CallCFunction(ExternalReference::power_double_double_function(), 0, 2);
3065 __ Mov(lr, saved_lr);
3069 __ Bind(&exponent_is_integer);
// Work with |exponent|; a negative exponent is handled by a final 1/x.
3072 Register exponent_abs = x13;
3073 __ Cmp(exponent_integer, 0);
3074 __ Cneg(exponent_abs, exponent_integer, mi);
// Square-and-multiply: scratch1 holds base^(2^k); multiply it into the
// result for each set bit of the exponent.
3087 Label power_loop, power_loop_entry, power_loop_exit;
3088 __ Fmov(scratch1_double, base_double);
3089 __ Fmov(base_double_copy, base_double);
3090 __ Fmov(result_double, 1.0);
3091 __ B(&power_loop_entry);
3093 __ Bind(&power_loop);
3094 __ Fmul(scratch1_double, scratch1_double, scratch1_double);
3095 __ Lsr(exponent_abs, exponent_abs, 1);
3096 __ Cbz(exponent_abs, &power_loop_exit);
3098 __ Bind(&power_loop_entry);
3099 __ Tbz(exponent_abs, 0, &power_loop);
3100 __ Fmul(result_double, result_double, scratch1_double);
3103 __ Bind(&power_loop_exit);
// Negative exponent: result = 1 / base^|exponent|.
3106 __ Tbz(exponent_integer, kXSignBit, &done);
3109 __ Fmov(scratch0_double, 1.0);
3110 __ Fdiv(result_double, scratch0_double, result_double);
// A zero result may have lost precision/sign information; redo the
// computation with the C library pow for full accuracy.
3117 __ Fcmp(result_double, 0.0);
3120 AllowExternalCallThatCantCauseGC scope(masm);
3121 __ Mov(saved_lr, lr);
3122 __ Fmov(base_double, base_double_copy);
3123 __ Scvtf(exponent_double, exponent_integer);
3124 __ CallCFunction(ExternalReference::power_double_double_function(), 0, 2);
3125 __ Mov(lr, saved_lr);
// Dispatches an internal array construction by argument count: zero args,
// one arg (with a packed->holey upgrade when the single length argument is
// non-zero), or N args via ArrayNArgumentsConstructor.
// NOTE(review): argc is referenced but its declaration line is elided in
// this excerpt.
3132 void GenerateInternalArrayConstructorCase(MacroAssembler* masm,
3133 ElementsKind kind) {
3134 Label zero_case, n_case;
3137 __ Cbz(argc, &zero_case);
3138 __ CompareAndBranch(argc, 1, ne, &n_case);
// Single argument: a non-zero length forces the holey variant, since the
// elements cannot stay packed.
3141 if (IsFastPackedElementsKind(kind)) {
3146 __ Cbz(x10, &packed_case);
3148 __ Jump(CodeFactory::InternalArraySingleArgumentConstructor(
3149 masm->isolate(), GetHoleyElementsKind(kind))
3151 RelocInfo::CODE_TARGET);
3153 __ Bind(&packed_case);
3157 CodeFactory::InternalArraySingleArgumentConstructor(masm->isolate(), kind)
3159 RelocInfo::CODE_TARGET);
3161 __ Bind(&zero_case);
3163 __ Jump(CodeFactory::InternalArrayNoArgumentConstructor(masm->isolate(), kind)
3165 RelocInfo::CODE_TARGET);
// N arguments: the generic constructor handles any count.
3171 __ LoadRoot(kJavaScriptCallExtraArg1Register, RootIndex::kUndefinedValue);
3172 Handle<Code> code = BUILTIN_CODE(masm->isolate(), ArrayNArgumentsConstructor);
3173 __ Jump(code, RelocInfo::CODE_TARGET);
// Entry point for internal array construction: reads the elements kind out
// of the constructor's initial map and dispatches to the matching
// GenerateInternalArrayConstructorCase (PACKED_ELEMENTS or HOLEY_ELEMENTS).
// NOTE(review): the `kind` register declaration is elided in this excerpt.
3178 void Builtins::Generate_InternalArrayConstructorImpl(MacroAssembler* masm) {
3186 Register constructor = x1;
// Debug-only: the constructor's prototype-or-initial-map slot must hold a
// real Map (not a Smi).
3188 if (FLAG_debug_code) {
3192 Label unexpected_map, map_ok;
3194 __ Ldr(x10, FieldMemOperand(constructor,
3195 JSFunction::kPrototypeOrInitialMapOffset));
3197 __ JumpIfSmi(x10, &unexpected_map);
3198 __ JumpIfObjectType(x10, x10, x11, MAP_TYPE, &map_ok);
3199 __ Bind(&unexpected_map);
3200 __ Abort(AbortReason::kUnexpectedInitialMapForArrayFunction);
// Extract the elements kind from the initial map.
3206 __ Ldr(x10, FieldMemOperand(constructor,
3207 JSFunction::kPrototypeOrInitialMapOffset));
3210 __ LoadElementsKindFromMap(kind, x10);
// Debug-only: only PACKED_ELEMENTS and HOLEY_ELEMENTS are valid here.
3212 if (FLAG_debug_code) {
3214 __ Cmp(x3, PACKED_ELEMENTS);
3215 __ Ccmp(x3, HOLEY_ELEMENTS, ZFlag, ne);
3218 AbortReason::kInvalidElementsKindForInternalArrayOrInternalPackedArray);
3221 Label fast_elements_case;
3222 __ CompareAndBranch(kind, PACKED_ELEMENTS, eq, &fast_elements_case);
3223 GenerateInternalArrayConstructorCase(masm, HOLEY_ELEMENTS);
3225 __ Bind(&fast_elements_case);
3226 GenerateInternalArrayConstructorCase(masm, PACKED_ELEMENTS);
3234 #endif  // V8_TARGET_ARCH_ARM64