#if V8_TARGET_ARCH_ARM

#include "src/assembler-inl.h"
#include "src/code-factory.h"
#include "src/code-stubs.h"
#include "src/counters.h"
#include "src/debug/debug.h"
#include "src/deoptimizer.h"
#include "src/frame-constants.h"
#include "src/frames.h"
#include "src/objects-inl.h"
#include "src/objects/js-generator.h"
#include "src/objects/smi.h"
#include "src/register-configuration.h"
#include "src/runtime/runtime.h"
#include "src/wasm/wasm-objects.h"

namespace v8 {
namespace internal {

#define __ ACCESS_MASM(masm)

void Builtins::Generate_Adaptor(MacroAssembler* masm, Address address,
                                ExitFrameType exit_frame_type) {
#if defined(__thumb__)
  // Thumb mode builtin.
  DCHECK_EQ(1, reinterpret_cast<uintptr_t>(
                   ExternalReference::Create(address).address()) &
                   1);
#endif
  __ Move(kJavaScriptCallExtraArg1Register, ExternalReference::Create(address));
  if (exit_frame_type == BUILTIN_EXIT) {
    __ Jump(BUILTIN_CODE(masm->isolate(), AdaptorWithBuiltinExitFrame),
            RelocInfo::CODE_TARGET);
  } else {
    DCHECK(exit_frame_type == EXIT);
    __ Jump(BUILTIN_CODE(masm->isolate(), AdaptorWithExitFrame),
            RelocInfo::CODE_TARGET);
  }
}
void Builtins::Generate_InternalArrayConstructor(MacroAssembler* masm) {
  Label generic_array_code, one_or_more_arguments, two_or_more_arguments;

  if (FLAG_debug_code) {
    // Initial map for the builtin InternalArray function should be a map.
    __ ldr(r2, FieldMemOperand(r1, JSFunction::kPrototypeOrInitialMapOffset));
    __ SmiTst(r2);
    __ Assert(ne, AbortReason::kUnexpectedInitialMapForInternalArrayFunction);
    __ CompareObjectType(r2, r3, r4, MAP_TYPE);
    __ Assert(eq, AbortReason::kUnexpectedInitialMapForInternalArrayFunction);
  }

  // Run the native code for the InternalArray function called as a normal
  // function.
  __ Jump(BUILTIN_CODE(masm->isolate(), InternalArrayConstructorImpl),
          RelocInfo::CODE_TARGET);
}
static void GenerateTailCallToReturnedCode(MacroAssembler* masm,
                                           Runtime::FunctionId function_id) {
  {
    FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
    // Push a copy of the target function and the new target; the copy of the
    // function acts as the argument to the runtime call.
    __ SmiTag(r0);
    __ Push(r0, r1, r3, r1);
    __ CallRuntime(function_id, 1);
    __ mov(r2, r0);
    // Restore target function and new target.
    __ Pop(r0, r1, r3);
    __ SmiUntag(r0);
  }
  static_assert(kJavaScriptCallCodeStartRegister == r2, "ABI mismatch");
  __ add(r2, r2, Operand(Code::kHeaderSize - kHeapObjectTag));
  __ Jump(r2);
}
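// Tail-call convention used above and throughout this file: the callee is
// entered through kJavaScriptCallCodeStartRegister (r2 on ARM, as the
// static_assert checks), which must hold the first instruction of the Code
// object, i.e. the tagged pointer plus Code::kHeaderSize - kHeapObjectTag.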
void Generate_JSBuiltinsConstructStubHelper(MacroAssembler* masm) {
  Register scratch = r2;

  // Enter a construct frame.
  {
    FrameAndConstantPoolScope scope(masm, StackFrame::CONSTRUCT);

    // Preserve the incoming parameters on the stack.
    __ SmiTag(r0);
    __ Push(cp, r0);
    __ SmiUntag(r0);

    // The receiver for the builtin/api call.
    __ PushRoot(RootIndex::kTheHoleValue);

    // Set up pointer to last argument.
    __ add(r4, fp, Operand(StandardFrameConstants::kCallerSPOffset));

    // Copy arguments and receiver to the expression stack.
    Label loop, entry;
    __ mov(r5, r0);
    __ b(&entry);
    __ bind(&loop);
    __ ldr(scratch, MemOperand(r4, r5, LSL, kPointerSizeLog2));
    __ push(scratch);
    __ bind(&entry);
    __ sub(r5, r5, Operand(1), SetCC);
    __ b(ge, &loop);

    // Call the function.
    ParameterCount actual(r0);
    __ InvokeFunction(r1, r3, actual, CALL_FUNCTION);

    // Restore context and smi-tagged arguments count from the frame.
    __ ldr(cp, MemOperand(fp, ConstructFrameConstants::kContextOffset));
    __ ldr(scratch, MemOperand(fp, ConstructFrameConstants::kLengthOffset));
    // Leave construct frame.
  }

  // Remove caller arguments from the stack and return.
  STATIC_ASSERT(kSmiTagSize == 1 && kSmiTag == 0);
  __ add(sp, sp, Operand(scratch, LSL, kPointerSizeLog2 - kSmiTagSize));
  __ add(sp, sp, Operand(kPointerSize));
  __ Jump(lr);
}
void Generate_StackOverflowCheck(MacroAssembler* masm, Register num_args,
                                 Register scratch, Label* stack_overflow) {
  // Check the stack for overflow. We are not trying to catch interruptions
  // (e.g. debug break and preemption) here, so the "real stack limit" is
  // checked.
  __ LoadRoot(scratch, RootIndex::kRealStackLimit);
  // Make scratch the space we have left. The stack might already be
  // overflowed here which will cause scratch to become negative.
  __ sub(scratch, sp, scratch);
  // Check if the arguments will overflow the stack.
  __ cmp(scratch, Operand(num_args, LSL, kPointerSizeLog2));
  __ b(le, stack_overflow);  // Signed comparison.
}
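// Example: for num_args == 3 on 32-bit ARM (kPointerSize == 4), the check
// above compares the remaining stack space (sp - real stack limit) against
// 3 << kPointerSizeLog2 == 12 bytes and branches to stack_overflow when no
// more than that remains. The subtraction may go negative if sp is already
// below the limit, which the signed 'le' comparison also treats as overflow.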
// The construct stub for ES5 constructor functions and ES6 class constructors.
void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) {
  // Enter a construct frame.
  {
    FrameAndConstantPoolScope scope(masm, StackFrame::CONSTRUCT);
    Label post_instantiation_deopt_entry, not_create_implicit_receiver;

    // Preserve the incoming parameters on the stack.
    __ LoadRoot(r4, RootIndex::kTheHoleValue);
    __ SmiTag(r0);
    __ Push(cp, r0, r1, r4, r3);

    // Derived-class constructors must not allocate an implicit receiver.
    __ ldr(r4, FieldMemOperand(r1, JSFunction::kSharedFunctionInfoOffset));
    __ ldr(r4, FieldMemOperand(r4, SharedFunctionInfo::kFlagsOffset));
    __ tst(r4, Operand(SharedFunctionInfo::IsDerivedConstructorBit::kMask));
    __ b(ne, &not_create_implicit_receiver);

    // If not derived class constructor: Allocate the new receiver object.
    __ IncrementCounter(masm->isolate()->counters()->constructed_objects(), 1,
                        r4, r5);
    __ Call(BUILTIN_CODE(masm->isolate(), FastNewObject),
            RelocInfo::CODE_TARGET);
    __ b(&post_instantiation_deopt_entry);

    // Else: use TheHoleValue as receiver for constructor call.
    __ bind(&not_create_implicit_receiver);
    __ LoadRoot(r0, RootIndex::kTheHoleValue);

    // Deoptimizer enters here.
    masm->isolate()->heap()->SetConstructStubCreateDeoptPCOffset(
        masm->pc_offset());
    __ bind(&post_instantiation_deopt_entry);

    // Restore new target and push the allocated receiver twice (we need two
    // copies because the called function pops the receiver).
    __ Pop(r3);
    __ Push(r0, r0);

    // Restore constructor function and argument count.
    __ ldr(r1, MemOperand(fp, ConstructFrameConstants::kConstructorOffset));
    __ ldr(r0, MemOperand(fp, ConstructFrameConstants::kLengthOffset));
    __ SmiUntag(r0);

    // Set up pointer to last argument.
    __ add(r4, fp, Operand(StandardFrameConstants::kCallerSPOffset));

    Label enough_stack_space, stack_overflow;
    Generate_StackOverflowCheck(masm, r0, r5, &stack_overflow);
    __ b(&enough_stack_space);

    __ bind(&stack_overflow);
    // Restore the context from the frame.
    __ ldr(cp, MemOperand(fp, ConstructFrameConstants::kContextOffset));
    __ CallRuntime(Runtime::kThrowStackOverflow);
    // Unreachable code.
    __ bkpt(0);

    __ bind(&enough_stack_space);

    // Copy arguments and receiver to the expression stack.
    Label loop, entry;
    __ mov(r5, r0);
    __ b(&entry);
    __ bind(&loop);
    __ ldr(r6, MemOperand(r4, r5, LSL, kPointerSizeLog2));
    __ push(r6);
    __ bind(&entry);
    __ sub(r5, r5, Operand(1), SetCC);
    __ b(ge, &loop);

    // Call the function.
    ParameterCount actual(r0);
    __ InvokeFunction(r1, r3, actual, CALL_FUNCTION);

    // Store offset of return address for deoptimizer.
    masm->isolate()->heap()->SetConstructStubInvokeDeoptPCOffset(
        masm->pc_offset());

    // Restore the context from the frame.
    __ ldr(cp, MemOperand(fp, ConstructFrameConstants::kContextOffset));

    // If the result is an object (in the ECMA sense), we should get rid of
    // the receiver and use the result; see ECMA-262 section 13.2.2-7.
    Label use_receiver, do_throw, leave_frame;

    // If the result is undefined, we use the implicit receiver.
    __ JumpIfRoot(r0, RootIndex::kUndefinedValue, &use_receiver);

    // If the result is a smi, it is *not* an object in the ECMA sense.
    __ JumpIfSmi(r0, &use_receiver);

    // If the type of the result (stored in its map) is less than
    // FIRST_JS_RECEIVER_TYPE, it is not an object in the ECMA sense.
    STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
    __ CompareObjectType(r0, r4, r5, FIRST_JS_RECEIVER_TYPE);
    __ b(ge, &leave_frame);
    __ b(&use_receiver);

    __ bind(&do_throw);
    __ CallRuntime(Runtime::kThrowConstructorReturnedNonObject);

    // Throw away the result of the constructor invocation and use the
    // on-stack receiver as the result.
    __ bind(&use_receiver);
    __ ldr(r0, MemOperand(sp, 0 * kPointerSize));
    __ JumpIfRoot(r0, RootIndex::kTheHoleValue, &do_throw);

    __ bind(&leave_frame);
    // Restore smi-tagged arguments count from the frame.
    __ ldr(r1, MemOperand(fp, ConstructFrameConstants::kLengthOffset));
    // Leave construct frame.
  }

  // Remove caller arguments from the stack and return.
  STATIC_ASSERT(kSmiTagSize == 1 && kSmiTag == 0);
  __ add(sp, sp, Operand(r1, LSL, kPointerSizeLog2 - kSmiTagSize));
  __ add(sp, sp, Operand(kPointerSize));
  __ Jump(lr);
}
void Builtins::Generate_JSBuiltinsConstructStub(MacroAssembler* masm) {
  Generate_JSBuiltinsConstructStubHelper(masm);
}
static void GetSharedFunctionInfoBytecode(MacroAssembler* masm,
                                          Register sfi_data,
                                          Register scratch1) {
  Label done;

  __ CompareObjectType(sfi_data, scratch1, scratch1, INTERPRETER_DATA_TYPE);
  __ b(ne, &done);
  __ ldr(sfi_data,
         FieldMemOperand(sfi_data, InterpreterData::kBytecodeArrayOffset));

  __ bind(&done);
}
void Builtins::Generate_ResumeGeneratorTrampoline(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r0 : the value to pass to the generator
  //  -- r1 : the JSGeneratorObject to resume
  //  -- lr : return address
  // -----------------------------------
  __ AssertGeneratorObject(r1);

  // Store input value into generator object.
  __ str(r0, FieldMemOperand(r1, JSGeneratorObject::kInputOrDebugPosOffset));
  __ RecordWriteField(r1, JSGeneratorObject::kInputOrDebugPosOffset, r0, r3,
                      kLRHasNotBeenSaved, kDontSaveFPRegs);

  // Load suspended function and context.
  __ ldr(r4, FieldMemOperand(r1, JSGeneratorObject::kFunctionOffset));
  __ ldr(cp, FieldMemOperand(r4, JSFunction::kContextOffset));

  Label prepare_step_in_if_stepping, prepare_step_in_suspended_generator;
  Label stepping_prepared;
  Register scratch = r5;

  // Flood function if we are stepping.
  ExternalReference debug_hook =
      ExternalReference::debug_hook_on_function_call_address(masm->isolate());
  __ Move(scratch, debug_hook);
  __ ldrsb(scratch, MemOperand(scratch));
  __ cmp(scratch, Operand(0));
  __ b(ne, &prepare_step_in_if_stepping);

  // Flood function if we need to continue stepping in the suspended generator.
  ExternalReference debug_suspended_generator =
      ExternalReference::debug_suspended_generator_address(masm->isolate());
  __ Move(scratch, debug_suspended_generator);
  __ ldr(scratch, MemOperand(scratch));
  __ cmp(scratch, Operand(r1));
  __ b(eq, &prepare_step_in_suspended_generator);
  __ bind(&stepping_prepared);

  // Check the stack for overflow. We are not trying to catch interruptions
  // (i.e. debug break and preemption) here, so check the "real stack limit".
  Label stack_overflow;
  __ CompareRoot(sp, RootIndex::kRealStackLimit);
  __ b(lo, &stack_overflow);

  // Push receiver.
  __ ldr(scratch, FieldMemOperand(r1, JSGeneratorObject::kReceiverOffset));
  __ Push(scratch);

  // Copy the function arguments from the generator object's register file.
  __ ldr(r3, FieldMemOperand(r4, JSFunction::kSharedFunctionInfoOffset));
  __ ldrh(r3,
          FieldMemOperand(r3, SharedFunctionInfo::kFormalParameterCountOffset));
  __ ldr(r2,
         FieldMemOperand(r1, JSGeneratorObject::kParametersAndRegistersOffset));
  {
    Label done_loop, loop;
    __ mov(r6, Operand(0));
    __ bind(&loop);
    __ cmp(r6, r3);
    __ b(ge, &done_loop);
    __ add(scratch, r2, Operand(r6, LSL, kPointerSizeLog2));
    __ ldr(scratch, FieldMemOperand(scratch, FixedArray::kHeaderSize));
    __ Push(scratch);
    __ add(r6, r6, Operand(1));
    __ b(&loop);
    __ bind(&done_loop);
  }

  // Underlying function needs to have bytecode available.
  if (FLAG_debug_code) {
    __ ldr(r3, FieldMemOperand(r4, JSFunction::kSharedFunctionInfoOffset));
    __ ldr(r3, FieldMemOperand(r3, SharedFunctionInfo::kFunctionDataOffset));
    GetSharedFunctionInfoBytecode(masm, r3, r0);
    __ CompareObjectType(r3, r3, r3, BYTECODE_ARRAY_TYPE);
    __ Assert(eq, AbortReason::kMissingBytecodeArray);
  }

  // Resume (Ignition/interpreted) generator object.
  {
    __ ldr(r0, FieldMemOperand(r4, JSFunction::kSharedFunctionInfoOffset));
    __ ldrh(r0, FieldMemOperand(
                    r0, SharedFunctionInfo::kFormalParameterCountOffset));
    // We abuse new.target both to indicate that this is a resume call and to
    // pass in the generator object. In ordinary calls, new.target is always
    // undefined because generator functions are non-constructable.
    __ Move(r3, r1);
    __ Move(r1, r4);
    static_assert(kJavaScriptCallCodeStartRegister == r2, "ABI mismatch");
    __ ldr(r2, FieldMemOperand(r1, JSFunction::kCodeOffset));
    __ add(r2, r2, Operand(Code::kHeaderSize - kHeapObjectTag));
    __ Jump(r2);
  }

  __ bind(&prepare_step_in_if_stepping);
  {
    FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
    __ Push(r1, r4);
    // Push hole as receiver since we do not use it for stepping.
    __ PushRoot(RootIndex::kTheHoleValue);
    __ CallRuntime(Runtime::kDebugOnFunctionCall);
    __ Pop(r1);
    __ ldr(r4, FieldMemOperand(r1, JSGeneratorObject::kFunctionOffset));
  }
  __ b(&stepping_prepared);

  __ bind(&prepare_step_in_suspended_generator);
  {
    FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
    __ Push(r1);
    __ CallRuntime(Runtime::kDebugPrepareStepInSuspendedGenerator);
    __ Pop(r1);
    __ ldr(r4, FieldMemOperand(r1, JSGeneratorObject::kFunctionOffset));
  }
  __ b(&stepping_prepared);

  __ bind(&stack_overflow);
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ CallRuntime(Runtime::kThrowStackOverflow);
    __ bkpt(0);  // This should be unreachable.
  }
}
void Builtins::Generate_ConstructedNonConstructable(MacroAssembler* masm) {
  FrameScope scope(masm, StackFrame::INTERNAL);
  __ push(r1);
  __ CallRuntime(Runtime::kThrowConstructedNonConstructable);
}
static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm,
                                             bool is_construct) {
  // Called from Generate_JS_Entry
  // r0: new.target
  // r1: function
  // r2: receiver
  // r3: argc
  // r4: argv

  // Enter an internal frame.
  {
    FrameScope scope(masm, StackFrame::INTERNAL);

    // Setup the context (we need to use the caller context from the isolate).
    ExternalReference context_address = ExternalReference::Create(
        IsolateAddressId::kContextAddress, masm->isolate());
    __ Move(cp, context_address);
    __ ldr(cp, MemOperand(cp));

    // Push the function and the receiver onto the stack.
    __ Push(r1, r2);

    // Check if we have enough stack space to push all arguments.
    Label enough_stack_space, stack_overflow;
    Generate_StackOverflowCheck(masm, r3, r2, &stack_overflow);
    __ b(&enough_stack_space);
    __ bind(&stack_overflow);
    __ CallRuntime(Runtime::kThrowStackOverflow);
    // Unreachable code.
    __ bkpt(0);

    __ bind(&enough_stack_space);

    // Remember new.target.
    __ mov(r5, r0);

    // Copy arguments to the stack in a loop; r4 points to the first argument,
    // r2 past the last.
    Label loop, entry;
    __ add(r2, r4, Operand(r3, LSL, kPointerSizeLog2));
    __ b(&entry);
    __ bind(&loop);
    __ ldr(r0, MemOperand(r4, kPointerSize, PostIndex));  // read next parameter
    __ ldr(r0, MemOperand(r0));                           // dereference handle
    __ push(r0);                                          // push parameter
    __ bind(&entry);
    __ cmp(r4, r2);
    __ b(ne, &loop);

    // Setup new.target and argc.
    __ mov(r0, Operand(r3));
    __ mov(r3, Operand(r5));

    // Initialize all JavaScript callee-saved registers, since they will be
    // seen by the garbage collector as part of handlers.
    __ LoadRoot(r4, RootIndex::kUndefinedValue);
    __ mov(r5, Operand(r4));
    __ mov(r6, Operand(r4));
    __ mov(r8, Operand(r4));
    if (kR9Available == 1) {
      __ mov(r9, Operand(r4));
    }

    // Invoke the code.
    Handle<Code> builtin = is_construct
                               ? BUILTIN_CODE(masm->isolate(), Construct)
                               : masm->isolate()->builtins()->Call();
    __ Call(builtin, RelocInfo::CODE_TARGET);

    // Exit the JS frame and remove the parameters (except function), and
    // return.
  }
  __ Jump(lr);
}
void Builtins::Generate_JSEntryTrampoline(MacroAssembler* masm) {
  Generate_JSEntryTrampolineHelper(masm, false);
}

void Builtins::Generate_JSConstructEntryTrampoline(MacroAssembler* masm) {
  Generate_JSEntryTrampolineHelper(masm, true);
}
static void ReplaceClosureCodeWithOptimizedCode(
    MacroAssembler* masm, Register optimized_code, Register closure,
    Register scratch1, Register scratch2, Register scratch3) {
  // Store code entry in the closure.
  __ str(optimized_code, FieldMemOperand(closure, JSFunction::kCodeOffset));
  __ mov(scratch1, optimized_code);  // Write barrier clobbers scratch1 below.
  __ RecordWriteField(closure, JSFunction::kCodeOffset, scratch1, scratch2,
                      kLRHasNotBeenSaved, kDontSaveFPRegs, OMIT_REMEMBERED_SET,
                      OMIT_SMI_CHECK);
}
static void LeaveInterpreterFrame(MacroAssembler* masm, Register scratch) {
  Register args_count = scratch;

  // Get the arguments + receiver count.
  __ ldr(args_count,
         MemOperand(fp, InterpreterFrameConstants::kBytecodeArrayFromFp));
  __ ldr(args_count,
         FieldMemOperand(args_count, BytecodeArray::kParameterSizeOffset));

  // Leave the frame (also dropping the register file).
  __ LeaveFrame(StackFrame::INTERPRETED);

  // Drop receiver + arguments.
  __ add(sp, sp, args_count, LeaveCC);
}
static void TailCallRuntimeIfMarkerEquals(MacroAssembler* masm,
                                          Register smi_entry,
                                          OptimizationMarker marker,
                                          Runtime::FunctionId function_id) {
  Label no_match;
  __ cmp(smi_entry, Operand(Smi::FromEnum(marker)));
  __ b(ne, &no_match);
  GenerateTailCallToReturnedCode(masm, function_id);
  __ bind(&no_match);
}
static void MaybeTailCallOptimizedCodeSlot(MacroAssembler* masm,
                                           Register feedback_vector,
                                           Register scratch1, Register scratch2,
                                           Register scratch3) {
  // ----------- S t a t e -------------
  //  -- r0 : argument count (preserved for callee if needed, and caller)
  //  -- r3 : new target (preserved for callee if needed, and caller)
  //  -- r1 : target function (preserved for callee if needed, and caller)
  //  -- feedback vector (preserved for caller if needed)
  // -----------------------------------
  DCHECK(
      !AreAliased(feedback_vector, r0, r1, r3, scratch1, scratch2, scratch3));

  Label optimized_code_slot_is_weak_ref, fallthrough;

  Register closure = r1;
  Register optimized_code_entry = scratch1;

  __ ldr(
      optimized_code_entry,
      FieldMemOperand(feedback_vector, FeedbackVector::kOptimizedCodeOffset));

  // Check if the code entry is a Smi. If yes, we interpret it as an
  // optimization marker. Otherwise, interpret it as a weak reference to a
  // code object.
  __ JumpIfNotSmi(optimized_code_entry, &optimized_code_slot_is_weak_ref);

  {
    // Optimized code slot is a Smi optimization marker.

    // Fall through if no optimization trigger.
    __ cmp(optimized_code_entry,
           Operand(Smi::FromEnum(OptimizationMarker::kNone)));
    __ b(eq, &fallthrough);

    TailCallRuntimeIfMarkerEquals(masm, optimized_code_entry,
                                  OptimizationMarker::kLogFirstExecution,
                                  Runtime::kFunctionFirstExecution);
    TailCallRuntimeIfMarkerEquals(masm, optimized_code_entry,
                                  OptimizationMarker::kCompileOptimized,
                                  Runtime::kCompileOptimized_NotConcurrent);
    TailCallRuntimeIfMarkerEquals(
        masm, optimized_code_entry,
        OptimizationMarker::kCompileOptimizedConcurrent,
        Runtime::kCompileOptimized_Concurrent);

    {
      // Otherwise, the marker is InOptimizationQueue, so fall through hoping
      // that an interrupt will eventually update the slot with optimized code.
      if (FLAG_debug_code) {
        __ cmp(
            optimized_code_entry,
            Operand(Smi::FromEnum(OptimizationMarker::kInOptimizationQueue)));
        __ Assert(eq, AbortReason::kExpectedOptimizationSentinel);
      }
      __ jmp(&fallthrough);
    }
  }

  {
    // Optimized code slot is a weak reference.
    __ bind(&optimized_code_slot_is_weak_ref);

    __ LoadWeakValue(optimized_code_entry, optimized_code_entry, &fallthrough);

    // Check if the optimized code is marked for deopt. If it is, call the
    // runtime to clear it.
    Label found_deoptimized_code;
    __ ldr(scratch2, FieldMemOperand(optimized_code_entry,
                                     Code::kCodeDataContainerOffset));
    __ ldr(
        scratch2,
        FieldMemOperand(scratch2, CodeDataContainer::kKindSpecificFlagsOffset));
    __ tst(scratch2, Operand(1 << Code::kMarkedForDeoptimizationBit));
    __ b(ne, &found_deoptimized_code);

    // Optimized code is good, get it into the closure and link the closure
    // into the optimized functions list, then tail call the optimized code.
    ReplaceClosureCodeWithOptimizedCode(masm, optimized_code_entry, closure,
                                        scratch2, scratch3, feedback_vector);
    static_assert(kJavaScriptCallCodeStartRegister == r2, "ABI mismatch");
    __ add(r2, optimized_code_entry,
           Operand(Code::kHeaderSize - kHeapObjectTag));
    __ Jump(r2);

    // Optimized code slot contains deoptimized code, evict it and re-enter
    // the closure's code.
    __ bind(&found_deoptimized_code);
    GenerateTailCallToReturnedCode(masm, Runtime::kEvictOptimizedCodeSlot);
  }

  // Fall-through if the optimized code cell is clear and there is no
  // optimization marker.
  __ bind(&fallthrough);
}
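// The optimized code slot of a feedback vector holds either a Smi-encoded
// OptimizationMarker (kNone, kLogFirstExecution, kCompileOptimized,
// kCompileOptimizedConcurrent, kInOptimizationQueue) or a weak reference to
// an optimized Code object; the JumpIfNotSmi above is what separates the two
// interpretations.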
static void AdvanceBytecodeOffsetOrReturn(MacroAssembler* masm,
                                          Register bytecode_array,
                                          Register bytecode_offset,
                                          Register bytecode, Register scratch1,
                                          Label* if_return) {
  Register bytecode_size_table = scratch1;
  DCHECK(!AreAliased(bytecode_array, bytecode_offset, bytecode_size_table,
                     bytecode));

  __ Move(bytecode_size_table,
          ExternalReference::bytecode_size_table_address());

  // Check if the bytecode is a Wide or ExtraWide prefix bytecode.
  Label process_bytecode, extra_wide;
  STATIC_ASSERT(0 == static_cast<int>(interpreter::Bytecode::kWide));
  STATIC_ASSERT(1 == static_cast<int>(interpreter::Bytecode::kExtraWide));
  STATIC_ASSERT(2 == static_cast<int>(interpreter::Bytecode::kDebugBreakWide));
  STATIC_ASSERT(3 ==
                static_cast<int>(interpreter::Bytecode::kDebugBreakExtraWide));
  __ cmp(bytecode, Operand(0x3));
  __ b(hi, &process_bytecode);
  __ tst(bytecode, Operand(0x1));
  __ b(ne, &extra_wide);

  // Load the next bytecode and update table to the wide scaled table.
  __ add(bytecode_offset, bytecode_offset, Operand(1));
  __ ldrb(bytecode, MemOperand(bytecode_array, bytecode_offset));
  __ add(bytecode_size_table, bytecode_size_table,
         Operand(kIntSize * interpreter::Bytecodes::kBytecodeCount));
  __ jmp(&process_bytecode);

  __ bind(&extra_wide);
  // Load the next bytecode and update table to the extra wide scaled table.
  __ add(bytecode_offset, bytecode_offset, Operand(1));
  __ ldrb(bytecode, MemOperand(bytecode_array, bytecode_offset));
  __ add(bytecode_size_table, bytecode_size_table,
         Operand(2 * kIntSize * interpreter::Bytecodes::kBytecodeCount));

  __ bind(&process_bytecode);

// Bailout to the return label if this is a return bytecode.
#define JUMP_IF_EQUAL(NAME)                                                    \
  __ cmp(bytecode,                                                             \
         Operand(static_cast<int>(interpreter::Bytecode::k##NAME)));           \
  __ b(if_return, eq);
  RETURN_BYTECODE_LIST(JUMP_IF_EQUAL)
#undef JUMP_IF_EQUAL

  // Otherwise, load the size of the current bytecode and advance the offset.
  __ ldr(scratch1, MemOperand(bytecode_size_table, bytecode, LSL, 2));
  __ add(bytecode_offset, bytecode_offset, scratch1);
}
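// The bytecode size table consists of three consecutive arrays of
// kBytecodeCount int entries: sizes for single, Wide and ExtraWide operand
// scaling. A Wide prefix therefore advances one byte to the real bytecode
// and biases the table base by kIntSize * kBytecodeCount, an ExtraWide
// prefix by twice that, so the final load picks up the correctly scaled size.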
void Builtins::Generate_InterpreterEntryTrampoline(MacroAssembler* masm) {
  Register closure = r1;
  Register feedback_vector = r2;

  // Load the feedback vector from the closure.
  __ ldr(feedback_vector,
         FieldMemOperand(closure, JSFunction::kFeedbackCellOffset));
  __ ldr(feedback_vector, FieldMemOperand(feedback_vector, Cell::kValueOffset));

  Label push_stack_frame;
  // Check if feedback vector is valid. If valid, check for optimized code
  // and update invocation count. Otherwise, setup the stack frame.
  __ CompareRoot(feedback_vector, RootIndex::kUndefinedValue);
  __ b(eq, &push_stack_frame);

  // Read off the optimized code slot in the feedback vector, and if there
  // is optimized code or an optimization marker, call that instead.
  MaybeTailCallOptimizedCodeSlot(masm, feedback_vector, r4, r6, r5);

  // Increment invocation count for the function.
  __ ldr(r9, FieldMemOperand(feedback_vector,
                             FeedbackVector::kInvocationCountOffset));
  __ add(r9, r9, Operand(1));
  __ str(r9, FieldMemOperand(feedback_vector,
                             FeedbackVector::kInvocationCountOffset));

  // Open a frame scope to indicate that there is a frame on the stack. The
  // MANUAL indicates that the scope shouldn't actually generate code to set
  // up the frame (that is done below).
  __ bind(&push_stack_frame);
  FrameScope frame_scope(masm, StackFrame::MANUAL);
  __ PushStandardFrame(closure);

  // Get the bytecode array from the function object and load it into
  // kInterpreterBytecodeArrayRegister.
  __ ldr(r0, FieldMemOperand(closure, JSFunction::kSharedFunctionInfoOffset));
  __ ldr(kInterpreterBytecodeArrayRegister,
         FieldMemOperand(r0, SharedFunctionInfo::kFunctionDataOffset));
  GetSharedFunctionInfoBytecode(masm, kInterpreterBytecodeArrayRegister, r4);

  // Check function data field is actually a BytecodeArray object.
  if (FLAG_debug_code) {
    __ SmiTst(kInterpreterBytecodeArrayRegister);
    __ Assert(
        ne, AbortReason::kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry);
    __ CompareObjectType(kInterpreterBytecodeArrayRegister, r0, no_reg,
                         BYTECODE_ARRAY_TYPE);
    __ Assert(
        eq, AbortReason::kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry);
  }

  // Reset code age.
  __ mov(r9, Operand(BytecodeArray::kNoAgeBytecodeAge));
  __ strb(r9, FieldMemOperand(kInterpreterBytecodeArrayRegister,
                              BytecodeArray::kBytecodeAgeOffset));

  // Load the initial bytecode offset.
  __ mov(kInterpreterBytecodeOffsetRegister,
         Operand(BytecodeArray::kHeaderSize - kHeapObjectTag));

  // Push bytecode array and Smi tagged bytecode array offset.
  __ SmiTag(r0, kInterpreterBytecodeOffsetRegister);
  __ Push(kInterpreterBytecodeArrayRegister, r0);

  // Allocate the local and temporary register file on the stack.
  {
    // Load frame size from the BytecodeArray object.
    __ ldr(r4, FieldMemOperand(kInterpreterBytecodeArrayRegister,
                               BytecodeArray::kFrameSizeOffset));

    // Do a stack check to ensure we don't go over the limit.
    Label ok;
    __ sub(r9, sp, Operand(r4));
    __ LoadRoot(r2, RootIndex::kRealStackLimit);
    __ cmp(r9, Operand(r2));
    __ b(hs, &ok);
    __ CallRuntime(Runtime::kThrowStackOverflow);
    __ bind(&ok);

    // If ok, push undefined as the initial value for all register file
    // entries.
    Label loop_header, loop_check;
    __ LoadRoot(r9, RootIndex::kUndefinedValue);
    __ b(&loop_check, al);
    __ bind(&loop_header);
    __ push(r9);
    // Continue loop if not done.
    __ bind(&loop_check);
    __ sub(r4, r4, Operand(kPointerSize), SetCC);
    __ b(&loop_header, ge);
  }

  // If the bytecode array has a valid incoming new target or generator object
  // register, initialize it with the incoming value which was passed in r3.
  __ ldr(r9, FieldMemOperand(
                 kInterpreterBytecodeArrayRegister,
                 BytecodeArray::kIncomingNewTargetOrGeneratorRegisterOffset));
  __ cmp(r9, Operand::Zero());
  __ str(r3, MemOperand(fp, r9, LSL, kPointerSizeLog2), ne);

  // Load accumulator with undefined.
  __ LoadRoot(kInterpreterAccumulatorRegister, RootIndex::kUndefinedValue);

  // Load the dispatch table into a register and dispatch to the bytecode
  // handler at the current bytecode offset.
  Label do_dispatch;
  __ bind(&do_dispatch);
  __ Move(
      kInterpreterDispatchTableRegister,
      ExternalReference::interpreter_dispatch_table_address(masm->isolate()));
  __ ldrb(r4, MemOperand(kInterpreterBytecodeArrayRegister,
                         kInterpreterBytecodeOffsetRegister));
  __ ldr(
      kJavaScriptCallCodeStartRegister,
      MemOperand(kInterpreterDispatchTableRegister, r4, LSL, kPointerSizeLog2));
  __ Call(kJavaScriptCallCodeStartRegister);
  masm->isolate()->heap()->SetInterpreterEntryReturnPCOffset(masm->pc_offset());

  // Any returns to the entry trampoline are either due to the return bytecode
  // or the interpreter tail calling a builtin and then a dispatch.

  // Get bytecode array and bytecode offset from the stack frame.
  __ ldr(kInterpreterBytecodeArrayRegister,
         MemOperand(fp, InterpreterFrameConstants::kBytecodeArrayFromFp));
  __ ldr(kInterpreterBytecodeOffsetRegister,
         MemOperand(fp, InterpreterFrameConstants::kBytecodeOffsetFromFp));
  __ SmiUntag(kInterpreterBytecodeOffsetRegister);

  // Either return, or advance to the next bytecode and dispatch.
  Label do_return;
  __ ldrb(r1, MemOperand(kInterpreterBytecodeArrayRegister,
                         kInterpreterBytecodeOffsetRegister));
  AdvanceBytecodeOffsetOrReturn(masm, kInterpreterBytecodeArrayRegister,
                                kInterpreterBytecodeOffsetRegister, r1, r2,
                                &do_return);
  __ jmp(&do_dispatch);

  __ bind(&do_return);
  // The return value is in r0.
  LeaveInterpreterFrame(masm, r2);
  __ Jump(lr);
}
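// Dispatch model: each bytecode handler's code start is stored in the
// dispatch table at the index of the bytecode's value, so dispatch is a byte
// load from the bytecode array followed by a word load from the table.
// Handlers return here, where the offset is advanced and control loops back
// to do_dispatch until AdvanceBytecodeOffsetOrReturn sees a return bytecode.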
static void Generate_InterpreterPushArgs(MacroAssembler* masm,
                                         Register num_args, Register index,
                                         Register limit, Register scratch) {
  // Find the address of the last argument.
  __ mov(limit, num_args);
  __ mov(limit, Operand(limit, LSL, kPointerSizeLog2));
  __ sub(limit, index, limit);

  Label loop_header, loop_check;
  __ b(al, &loop_check);
  __ bind(&loop_header);
  __ ldr(scratch, MemOperand(index, -kPointerSize, PostIndex));
  __ push(scratch);
  __ bind(&loop_check);
  __ cmp(index, limit);
  __ b(gt, &loop_header);
}
void Builtins::Generate_InterpreterPushArgsThenCallImpl(
    MacroAssembler* masm, ConvertReceiverMode receiver_mode,
    InterpreterPushArgsMode mode) {
  DCHECK(mode != InterpreterPushArgsMode::kArrayFunction);
  // ----------- S t a t e -------------
  //  -- r0 : the number of arguments (not including the receiver)
  //  -- r2 : the address of the first argument to be pushed
  //  -- r1 : the target to call (can be any Object)
  // -----------------------------------
  Label stack_overflow;

  __ add(r3, r0, Operand(1));  // Add one for receiver.

  Generate_StackOverflowCheck(masm, r3, r4, &stack_overflow);

  // Push "undefined" as the receiver arg if we need to.
  if (receiver_mode == ConvertReceiverMode::kNullOrUndefined) {
    __ PushRoot(RootIndex::kUndefinedValue);
    __ mov(r3, r0);  // Argument count is correct.
  }

  // Push the arguments. r2, r4, r5 will be modified.
  Generate_InterpreterPushArgs(masm, r3, r2, r4, r5);

  if (mode == InterpreterPushArgsMode::kWithFinalSpread) {
    __ Pop(r2);                  // Pass the spread in a register
    __ sub(r0, r0, Operand(1));  // Subtract one for spread
  }

  // Call the target.
  if (mode == InterpreterPushArgsMode::kWithFinalSpread) {
    __ Jump(BUILTIN_CODE(masm->isolate(), CallWithSpread),
            RelocInfo::CODE_TARGET);
  } else {
    __ Jump(masm->isolate()->builtins()->Call(ConvertReceiverMode::kAny),
            RelocInfo::CODE_TARGET);
  }

  __ bind(&stack_overflow);
  {
    __ TailCallRuntime(Runtime::kThrowStackOverflow);
    // Unreachable code.
    __ bkpt(0);
  }
}
void Builtins::Generate_InterpreterPushArgsThenConstructImpl(
    MacroAssembler* masm, InterpreterPushArgsMode mode) {
  // ----------- S t a t e -------------
  //  -- r0 : argument count (not including receiver)
  //  -- r3 : new target
  //  -- r1 : constructor to call
  //  -- r2 : allocation site feedback if available, undefined otherwise
  //  -- r4 : address of the first argument
  // -----------------------------------
  Label stack_overflow;

  // Push a slot for the receiver to be constructed.
  __ mov(r5, Operand::Zero());
  __ push(r5);

  Generate_StackOverflowCheck(masm, r0, r5, &stack_overflow);

  // Push the arguments. r4, r5, r6 will be modified.
  Generate_InterpreterPushArgs(masm, r0, r4, r5, r6);

  if (mode == InterpreterPushArgsMode::kWithFinalSpread) {
    __ Pop(r2);                  // Pass the spread in a register
    __ sub(r0, r0, Operand(1));  // Subtract one for spread
  } else {
    __ AssertUndefinedOrAllocationSite(r2, r5);
  }

  if (mode == InterpreterPushArgsMode::kArrayFunction) {
    __ AssertFunction(r1);

    // Tail call to the array construct stub (still in the caller context at
    // this point).
    Handle<Code> code = BUILTIN_CODE(masm->isolate(), ArrayConstructorImpl);
    __ Jump(code, RelocInfo::CODE_TARGET);
  } else if (mode == InterpreterPushArgsMode::kWithFinalSpread) {
    // Call the constructor with r0, r1, and r3 unmodified.
    __ Jump(BUILTIN_CODE(masm->isolate(), ConstructWithSpread),
            RelocInfo::CODE_TARGET);
  } else {
    DCHECK_EQ(InterpreterPushArgsMode::kOther, mode);
    // Call the constructor with r0, r1, and r3 unmodified.
    __ Jump(BUILTIN_CODE(masm->isolate(), Construct), RelocInfo::CODE_TARGET);
  }

  __ bind(&stack_overflow);
  {
    __ TailCallRuntime(Runtime::kThrowStackOverflow);
    // Unreachable code.
    __ bkpt(0);
  }
}
static void Generate_InterpreterEnterBytecode(MacroAssembler* masm) {
  // Set the return address to the correct point in the interpreter entry
  // trampoline.
  Label builtin_trampoline, trampoline_loaded;
  Smi interpreter_entry_return_pc_offset(
      masm->isolate()->heap()->interpreter_entry_return_pc_offset());
  DCHECK_NE(interpreter_entry_return_pc_offset, Smi::kZero);

  // If the SFI function_data is an InterpreterData, the function will have a
  // custom copy of the interpreter entry trampoline for profiling. If so,
  // get the custom trampoline, otherwise grab the entry address of the global
  // trampoline.
  __ ldr(r2, MemOperand(fp, StandardFrameConstants::kFunctionOffset));
  __ ldr(r2, FieldMemOperand(r2, JSFunction::kSharedFunctionInfoOffset));
  __ ldr(r2, FieldMemOperand(r2, SharedFunctionInfo::kFunctionDataOffset));
  __ CompareObjectType(r2, kInterpreterDispatchTableRegister,
                       kInterpreterDispatchTableRegister,
                       INTERPRETER_DATA_TYPE);
  __ b(ne, &builtin_trampoline);

  __ ldr(r2,
         FieldMemOperand(r2, InterpreterData::kInterpreterTrampolineOffset));
  __ add(r2, r2, Operand(Code::kHeaderSize - kHeapObjectTag));
  __ b(&trampoline_loaded);

  __ bind(&builtin_trampoline);
  __ Move(r2, ExternalReference::
                  address_of_interpreter_entry_trampoline_instruction_start(
                      masm->isolate()));
  __ ldr(r2, MemOperand(r2));

  __ bind(&trampoline_loaded);
  __ add(lr, r2, Operand(interpreter_entry_return_pc_offset->value()));

  // Initialize the dispatch table register.
  __ Move(
      kInterpreterDispatchTableRegister,
      ExternalReference::interpreter_dispatch_table_address(masm->isolate()));

  // Get the bytecode array pointer from the frame.
  __ ldr(kInterpreterBytecodeArrayRegister,
         MemOperand(fp, InterpreterFrameConstants::kBytecodeArrayFromFp));

  if (FLAG_debug_code) {
    // Check function data field is actually a BytecodeArray object.
    __ SmiTst(kInterpreterBytecodeArrayRegister);
    __ Assert(
        ne, AbortReason::kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry);
    __ CompareObjectType(kInterpreterBytecodeArrayRegister, r1, no_reg,
                         BYTECODE_ARRAY_TYPE);
    __ Assert(
        eq, AbortReason::kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry);
  }

  // Get the target bytecode offset from the frame.
  __ ldr(kInterpreterBytecodeOffsetRegister,
         MemOperand(fp, InterpreterFrameConstants::kBytecodeOffsetFromFp));
  __ SmiUntag(kInterpreterBytecodeOffsetRegister);

  // Dispatch to the target bytecode.
  UseScratchRegisterScope temps(masm);
  Register scratch = temps.Acquire();
  __ ldrb(scratch, MemOperand(kInterpreterBytecodeArrayRegister,
                              kInterpreterBytecodeOffsetRegister));
  __ ldr(kJavaScriptCallCodeStartRegister,
         MemOperand(kInterpreterDispatchTableRegister, scratch, LSL,
                    kPointerSizeLog2));
  __ Jump(kJavaScriptCallCodeStartRegister);
}
void Builtins::Generate_InterpreterEnterBytecodeAdvance(MacroAssembler* masm) {
  // Get bytecode array and bytecode offset from the stack frame.
  __ ldr(kInterpreterBytecodeArrayRegister,
         MemOperand(fp, InterpreterFrameConstants::kBytecodeArrayFromFp));
  __ ldr(kInterpreterBytecodeOffsetRegister,
         MemOperand(fp, InterpreterFrameConstants::kBytecodeOffsetFromFp));
  __ SmiUntag(kInterpreterBytecodeOffsetRegister);

  // Load the current bytecode.
  __ ldrb(r1, MemOperand(kInterpreterBytecodeArrayRegister,
                         kInterpreterBytecodeOffsetRegister));

  // Advance to the next bytecode.
  Label if_return;
  AdvanceBytecodeOffsetOrReturn(masm, kInterpreterBytecodeArrayRegister,
                                kInterpreterBytecodeOffsetRegister, r1, r2,
                                &if_return);

  // Convert the new bytecode offset to a Smi and save it in the stack frame.
  __ SmiTag(r2, kInterpreterBytecodeOffsetRegister);
  __ str(r2, MemOperand(fp, InterpreterFrameConstants::kBytecodeOffsetFromFp));

  Generate_InterpreterEnterBytecode(masm);

  // We should never take the if_return path.
  __ bind(&if_return);
  __ Abort(AbortReason::kInvalidBytecodeAdvance);
}
void Builtins::Generate_InterpreterEnterBytecodeDispatch(MacroAssembler* masm) {
  Generate_InterpreterEnterBytecode(masm);
}
void Builtins::Generate_InstantiateAsmJs(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r0 : argument count (preserved for callee)
  //  -- r1 : target function (preserved for callee)
  //  -- r3 : new target (preserved for callee)
  // -----------------------------------
  Label failed;
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    // Preserve argument count for later compare.
    __ Move(r4, r0);
    // Push the number of arguments to the callee.
    __ SmiTag(r0);
    __ push(r0);
    // Push a copy of the target function and the new target.
    __ push(r1);
    __ push(r3);

    // The function.
    __ push(r1);
    // Copy arguments from caller (stdlib, foreign, heap).
    Label args_done;
    for (int j = 0; j < 4; ++j) {
      Label over;
      if (j < 3) {
        __ cmp(r4, Operand(j));
        __ b(ne, &over);
      }
      for (int i = j - 1; i >= 0; --i) {
        __ ldr(r4, MemOperand(fp, StandardFrameConstants::kCallerSPOffset +
                                      i * kPointerSize));
        __ push(r4);
      }
      for (int i = 0; i < 3 - j; ++i) {
        __ PushRoot(RootIndex::kUndefinedValue);
      }
      if (j < 3) {
        __ jmp(&args_done);
        __ bind(&over);
      }
    }
    __ bind(&args_done);

    // Call runtime, on success unwind frame, and parent frame.
    __ CallRuntime(Runtime::kInstantiateAsmJs, 4);
    // A smi 0 is returned on failure, an object on success.
    __ JumpIfSmi(r0, &failed);

    __ Drop(2);
    __ pop(r4);
    __ SmiUntag(r4);
    scope.GenerateLeaveFrame();

    __ add(r4, r4, Operand(1));
    __ Drop(r4);
    __ Ret();

    __ bind(&failed);
    // Restore target function and new target.
    __ pop(r3);
    __ pop(r1);
    __ pop(r0);
    __ SmiUntag(r0);
  }
  // On failure, tail call back to regular JavaScript by re-calling the
  // function which has been reset to the compile lazy builtin.
  static_assert(kJavaScriptCallCodeStartRegister == r2, "ABI mismatch");
  __ ldr(r2, FieldMemOperand(r1, JSFunction::kCodeOffset));
  __ add(r2, r2, Operand(Code::kHeaderSize - kHeapObjectTag));
  __ Jump(r2);
}
void Generate_ContinueToBuiltinHelper(MacroAssembler* masm,
                                      bool java_script_builtin,
                                      bool with_result) {
  const RegisterConfiguration* config(RegisterConfiguration::Default());
  int allocatable_register_count = config->num_allocatable_general_registers();
  if (with_result) {
    // Overwrite the hole inserted by the deoptimizer with the return value
    // from the LAZY deopt point.
    __ str(r0,
           MemOperand(
               sp, config->num_allocatable_general_registers() * kPointerSize +
                       BuiltinContinuationFrameConstants::kFixedFrameSize));
  }
  for (int i = allocatable_register_count - 1; i >= 0; --i) {
    int code = config->GetAllocatableGeneralCode(i);
    __ Pop(Register::from_code(code));
    if (java_script_builtin && code == kJavaScriptCallArgCountRegister.code()) {
      __ SmiUntag(Register::from_code(code));
    }
  }
  __ ldr(fp, MemOperand(
                 sp, BuiltinContinuationFrameConstants::kFixedFrameSizeFromFp));

  UseScratchRegisterScope temps(masm);
  Register scratch = temps.Acquire();
  __ Pop(scratch);
  __ add(sp, sp,
         Operand(BuiltinContinuationFrameConstants::kFixedFrameSizeFromFp));
  __ Pop(lr);
  __ add(pc, scratch, Operand(Code::kHeaderSize - kHeapObjectTag));
}
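// The deoptimizer materializes a continuation frame that holds the values of
// all allocatable general registers above the fixed frame slots; the helper
// above pops them back in reverse allocation order, un-Smi-tags the argument
// count for JavaScript builtins, restores fp and lr, and jumps to the
// continuation builtin's code start.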
void Builtins::Generate_ContinueToCodeStubBuiltin(MacroAssembler* masm) {
  Generate_ContinueToBuiltinHelper(masm, false, false);
}

void Builtins::Generate_ContinueToCodeStubBuiltinWithResult(
    MacroAssembler* masm) {
  Generate_ContinueToBuiltinHelper(masm, false, true);
}

void Builtins::Generate_ContinueToJavaScriptBuiltin(MacroAssembler* masm) {
  Generate_ContinueToBuiltinHelper(masm, true, false);
}

void Builtins::Generate_ContinueToJavaScriptBuiltinWithResult(
    MacroAssembler* masm) {
  Generate_ContinueToBuiltinHelper(masm, true, true);
}
void Builtins::Generate_NotifyDeoptimized(MacroAssembler* masm) {
  {
    FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
    __ CallRuntime(Runtime::kNotifyDeoptimized);
  }

  DCHECK_EQ(kInterpreterAccumulatorRegister.code(), r0.code());
  __ pop(r0);
  __ Ret();
}
void Builtins::Generate_InterpreterOnStackReplacement(MacroAssembler* masm) {
  // Lookup the function in the JavaScript frame.
  __ ldr(r0, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
  __ ldr(r0, MemOperand(r0, JavaScriptFrameConstants::kFunctionOffset));

  {
    FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
    // Pass function as argument.
    __ push(r0);
    __ CallRuntime(Runtime::kCompileForOnStackReplacement);
  }

  // If the code object is null, just return to the caller.
  Label skip;
  __ cmp(r0, Operand(Smi::zero()));
  __ b(ne, &skip);
  __ Ret();

  __ bind(&skip);

  // Drop the handler frame that is sitting on top of the actual JavaScript
  // frame. This is the case when OSR is triggered from bytecode.
  __ LeaveFrame(StackFrame::STUB);

  // Load deoptimization data from the code object.
  // <deopt_data> = <code>[#deoptimization_data_offset]
  __ ldr(r1, FieldMemOperand(r0, Code::kDeoptimizationDataOffset));

  {
    ConstantPoolUnavailableScope constant_pool_unavailable(masm);
    __ add(r0, r0, Operand(Code::kHeaderSize - kHeapObjectTag));  // Code start

    // Load the OSR entrypoint offset from the deoptimization data.
    // <osr_offset> = <deopt_data>[#header_size + #osr_pc_offset]
    __ ldr(r1, FieldMemOperand(r1, FixedArray::OffsetOfElementAt(
                                       DeoptimizationData::kOsrPcOffsetIndex)));

    // Compute the target address = code start + osr_offset
    __ add(lr, r0, Operand::SmiUntag(r1));

    // And "return" to the OSR entry point of the function.
    __ Ret();
  }
}
void Builtins::Generate_FunctionPrototypeApply(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r0    : argc
  //  -- sp[0] : argArray
  //  -- sp[4] : thisArg
  //  -- sp[8] : receiver
  // -----------------------------------

  // 1. Load receiver into r1, argArray into r2 (if present), remove all
  // arguments from the stack (including the receiver), and push thisArg (if
  // present) instead.
  {
    __ LoadRoot(r5, RootIndex::kUndefinedValue);
    __ mov(r2, r5);
    __ ldr(r1, MemOperand(sp, r0, LSL, kPointerSizeLog2));  // receiver
    __ sub(r4, r0, Operand(1), SetCC);
    __ ldr(r5, MemOperand(sp, r4, LSL, kPointerSizeLog2), ge);  // thisArg
    __ sub(r4, r4, Operand(1), SetCC, ge);
    __ ldr(r2, MemOperand(sp, r4, LSL, kPointerSizeLog2), ge);  // argArray
    __ add(sp, sp, Operand(r0, LSL, kPointerSizeLog2));
    __ str(r5, MemOperand(sp, 0));
  }

  // 2. We don't need to check explicitly for callable receiver here, since
  // that's the first thing the Call/CallWithArrayLike builtins will do.

  // 3. Tail call with no arguments if argArray is null or undefined.
  Label no_arguments;
  __ JumpIfRoot(r2, RootIndex::kNullValue, &no_arguments);
  __ JumpIfRoot(r2, RootIndex::kUndefinedValue, &no_arguments);

  // 4a. Apply the receiver to the given argArray.
  __ Jump(BUILTIN_CODE(masm->isolate(), CallWithArrayLike),
          RelocInfo::CODE_TARGET);

  // 4b. The argArray is either null or undefined, so we tail call without
  // any arguments to the receiver.
  __ bind(&no_arguments);
  {
    __ mov(r0, Operand(0));
    __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
  }
}
void Builtins::Generate_FunctionPrototypeCall(MacroAssembler* masm) {
  // 1. Make sure we have at least one argument.
  // r0: actual number of arguments
  {
    Label done;
    __ cmp(r0, Operand::Zero());
    __ b(ne, &done);
    __ PushRoot(RootIndex::kUndefinedValue);
    __ add(r0, r0, Operand(1));
    __ bind(&done);
  }

  // 2. Get the callable to call (passed as receiver) from the stack.
  // r0: actual number of arguments
  __ ldr(r1, MemOperand(sp, r0, LSL, kPointerSizeLog2));

  // 3. Shift arguments and return address one slot down on the stack
  //    (overwriting the original receiver). Adjust argument count to make
  //    the original first argument the new receiver.
  // r0: actual number of arguments
  // r1: callable
  {
    Register scratch = r3;
    Label loop;
    // Calculate the copy start address (destination). Copy end address is sp.
    __ add(r2, sp, Operand(r0, LSL, kPointerSizeLog2));

    __ bind(&loop);
    __ ldr(scratch, MemOperand(r2, -kPointerSize));
    __ str(scratch, MemOperand(r2));
    __ sub(r2, r2, Operand(kPointerSize));
    __ cmp(r2, sp);  // Compare before moving to next argument.
    __ b(ne, &loop);

    // Adjust the actual number of arguments and remove the top element
    // (which is a copy of the last argument).
    __ sub(r0, r0, Operand(1));
    __ pop();
  }

  // 4. Call the callable.
  __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
}
void Builtins::Generate_ReflectApply(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r0     : argc
  //  -- sp[0]  : argumentsList
  //  -- sp[4]  : thisArgument
  //  -- sp[8]  : target
  //  -- sp[12] : receiver
  // -----------------------------------

  // 1. Load target into r1 (if present), argumentsList into r2 (if present),
  // remove all arguments from the stack (including the receiver), and push
  // thisArgument (if present) instead.
  {
    __ LoadRoot(r1, RootIndex::kUndefinedValue);
    __ mov(r5, r1);
    __ mov(r2, r1);
    __ sub(r4, r0, Operand(1), SetCC);
    __ ldr(r1, MemOperand(sp, r4, LSL, kPointerSizeLog2), ge);  // target
    __ sub(r4, r4, Operand(1), SetCC, ge);
    __ ldr(r5, MemOperand(sp, r4, LSL, kPointerSizeLog2), ge);  // thisArgument
    __ sub(r4, r4, Operand(1), SetCC, ge);
    __ ldr(r2, MemOperand(sp, r4, LSL, kPointerSizeLog2), ge);  // argumentsList
    __ add(sp, sp, Operand(r0, LSL, kPointerSizeLog2));
    __ str(r5, MemOperand(sp, 0));
  }

  // 2. We don't need to check explicitly for callable target here, since
  // that's the first thing the Call/CallWithArrayLike builtins will do.

  // 3. Apply the target to the given argumentsList.
  __ Jump(BUILTIN_CODE(masm->isolate(), CallWithArrayLike),
          RelocInfo::CODE_TARGET);
}
void Builtins::Generate_ReflectConstruct(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r0     : argc
  //  -- sp[0]  : new.target (optional)
  //  -- sp[4]  : argumentsList
  //  -- sp[8]  : target
  //  -- sp[12] : receiver
  // -----------------------------------

  // 1. Load target into r1 (if present), argumentsList into r2 (if present),
  // new.target into r3 (if present, otherwise use target), remove all
  // arguments from the stack (including the receiver), and push undefined as
  // the receiver for the construction instead.
  {
    __ LoadRoot(r1, RootIndex::kUndefinedValue);
    __ mov(r2, r1);
    __ str(r2, MemOperand(sp, r0, LSL, kPointerSizeLog2));  // receiver
    __ sub(r4, r0, Operand(1), SetCC);
    __ ldr(r1, MemOperand(sp, r4, LSL, kPointerSizeLog2), ge);  // target
    __ mov(r3, r1);  // new.target defaults to target
    __ sub(r4, r4, Operand(1), SetCC, ge);
    __ ldr(r2, MemOperand(sp, r4, LSL, kPointerSizeLog2), ge);  // argumentsList
    __ sub(r4, r4, Operand(1), SetCC, ge);
    __ ldr(r3, MemOperand(sp, r4, LSL, kPointerSizeLog2), ge);  // new.target
    __ add(sp, sp, Operand(r0, LSL, kPointerSizeLog2));
  }

  // 2. We don't need to check explicitly for constructor target or
  // new.target here, since that's the first thing the
  // Construct/ConstructWithArrayLike builtins will do.

  // 3. Construct the target with the given new.target and argumentsList.
  __ Jump(BUILTIN_CODE(masm->isolate(), ConstructWithArrayLike),
          RelocInfo::CODE_TARGET);
}
static void EnterArgumentsAdaptorFrame(MacroAssembler* masm) {
  __ SmiTag(r0);
  __ mov(r4, Operand(StackFrame::TypeToMarker(StackFrame::ARGUMENTS_ADAPTOR)));
  __ stm(db_w, sp,
         r0.bit() | r1.bit() | r4.bit() |
             (FLAG_enable_embedded_constant_pool ? pp.bit() : 0) |
             fp.bit() | lr.bit());
  __ Push(Smi::zero());  // Padding.
  __ add(fp, sp,
         Operand(ArgumentsAdaptorFrameConstants::kFixedFrameSizeFromFp));
}

static void LeaveArgumentsAdaptorFrame(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r0 : result being passed through
  // -----------------------------------
  // Get the number of arguments passed (as a smi), tear down the frame and
  // then tear down the parameters.
  __ ldr(r1, MemOperand(fp, ArgumentsAdaptorFrameConstants::kLengthOffset));

  __ LeaveFrame(StackFrame::ARGUMENTS_ADAPTOR);
  __ add(sp, sp, Operand::PointerOffsetFromSmiKey(r1));
  __ add(sp, sp, Operand(kPointerSize));  // adjust for receiver
}
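// The adaptor frame records the actual argument count (Smi-tagged in
// EnterArgumentsAdaptorFrame) alongside the standard frame slots, so
// LeaveArgumentsAdaptorFrame can read it back via kLengthOffset and drop
// exactly the adapted arguments plus the receiver from the stack on exit.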
void Builtins::Generate_CallOrConstructVarargs(MacroAssembler* masm,
                                               Handle<Code> code) {
  // ----------- S t a t e -------------
  //  -- r1 : target
  //  -- r0 : number of parameters on the stack (not including the receiver)
  //  -- r2 : arguments list (a FixedArray)
  //  -- r4 : len (number of elements to push from args)
  //  -- r3 : new.target (for [[Construct]])
  // -----------------------------------
  Register scratch = r8;

  if (masm->emit_debug_code()) {
    // Allow r2 to be a FixedArray, or a FixedDoubleArray if r4 == 0.
    Label ok, fail;
    __ AssertNotSmi(r2);
    __ ldr(scratch, FieldMemOperand(r2, HeapObject::kMapOffset));
    __ ldrh(r6, FieldMemOperand(scratch, Map::kInstanceTypeOffset));
    __ cmp(r6, Operand(FIXED_ARRAY_TYPE));
    __ b(eq, &ok);
    __ cmp(r6, Operand(FIXED_DOUBLE_ARRAY_TYPE));
    __ b(ne, &fail);
    __ cmp(r4, Operand(0));
    __ b(eq, &ok);
    // Fall through.
    __ bind(&fail);
    __ Abort(AbortReason::kOperandIsNotAFixedArray);

    __ bind(&ok);
  }

  // Check for stack overflow.
  Label stack_overflow;
  Generate_StackOverflowCheck(masm, r4, scratch, &stack_overflow);

  // Push arguments onto the stack (thisArgument is already on the stack).
  {
    __ mov(r6, Operand(0));
    __ LoadRoot(r5, RootIndex::kTheHoleValue);
    Label done, loop;
    __ bind(&loop);
    __ cmp(r6, r4);
    __ b(eq, &done);
    __ add(scratch, r2, Operand(r6, LSL, kPointerSizeLog2));
    __ ldr(scratch, FieldMemOperand(scratch, FixedArray::kHeaderSize));
    __ cmp(scratch, r5);
    // Turn the hole into undefined as we go.
    __ LoadRoot(scratch, RootIndex::kUndefinedValue, eq);
    __ Push(scratch);
    __ add(r6, r6, Operand(1));
    __ b(&loop);
    __ bind(&done);
    __ add(r0, r0, r6);
  }

  // Tail-call to the actual Call or Construct builtin.
  __ Jump(code, RelocInfo::CODE_TARGET);

  __ bind(&stack_overflow);
  __ TailCallRuntime(Runtime::kThrowStackOverflow);
}
void Builtins::Generate_CallOrConstructForwardVarargs(MacroAssembler* masm,
                                                      CallOrConstructMode mode,
                                                      Handle<Code> code) {
  // ----------- S t a t e -------------
  //  -- r0 : the number of arguments (not including the receiver)
  //  -- r3 : the new.target (for [[Construct]] calls)
  //  -- r1 : the target to call (can be any Object)
  //  -- r2 : start index (to support rest parameters)
  // -----------------------------------
  Register scratch = r6;

  // Check if new.target has a [[Construct]] internal method.
  if (mode == CallOrConstructMode::kConstruct) {
    Label new_target_constructor, new_target_not_constructor;
    __ JumpIfSmi(r3, &new_target_not_constructor);
    __ ldr(scratch, FieldMemOperand(r3, HeapObject::kMapOffset));
    __ ldrb(scratch, FieldMemOperand(scratch, Map::kBitFieldOffset));
    __ tst(scratch, Operand(Map::IsConstructorBit::kMask));
    __ b(ne, &new_target_constructor);
    __ bind(&new_target_not_constructor);
    {
      FrameScope scope(masm, StackFrame::MANUAL);
      __ EnterFrame(StackFrame::INTERNAL);
      __ Push(r3);
      __ CallRuntime(Runtime::kThrowNotConstructor);
    }
    __ bind(&new_target_constructor);
  }

  // Check if we have an arguments adaptor frame below the function frame.
  Label arguments_adaptor, arguments_done;
  __ ldr(r4, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
  __ ldr(scratch,
         MemOperand(r4, CommonFrameConstants::kContextOrFrameTypeOffset));
  __ cmp(scratch,
         Operand(StackFrame::TypeToMarker(StackFrame::ARGUMENTS_ADAPTOR)));
  __ b(eq, &arguments_adaptor);
  {
    // Load the length from the frame of the function directly.
    __ ldr(r5, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
    __ ldr(r5, FieldMemOperand(r5, JSFunction::kSharedFunctionInfoOffset));
    __ ldrh(r5, FieldMemOperand(
                    r5, SharedFunctionInfo::kFormalParameterCountOffset));
    __ mov(r4, fp);
  }
  __ b(&arguments_done);
  __ bind(&arguments_adaptor);
  {
    // Load the length from the ArgumentsAdaptorFrame.
    __ ldr(r5, MemOperand(r4, ArgumentsAdaptorFrameConstants::kLengthOffset));
    __ SmiUntag(r5);
  }
  __ bind(&arguments_done);

  Label stack_done, stack_overflow;
  __ sub(r5, r5, r2, SetCC);
  __ b(le, &stack_done);
  {
    // Check for stack overflow.
    Generate_StackOverflowCheck(masm, r5, r2, &stack_overflow);

    // Forward the arguments from the caller frame.
    {
      Label loop;
      __ add(r4, r4, Operand(kPointerSize));
      __ add(r0, r0, r5);
      __ bind(&loop);
      {
        __ ldr(scratch, MemOperand(r4, r5, LSL, kPointerSizeLog2));
        __ push(scratch);
        __ sub(r5, r5, Operand(1), SetCC);
        __ b(ne, &loop);
      }
    }
  }
  __ b(&stack_done);
  __ bind(&stack_overflow);
  __ TailCallRuntime(Runtime::kThrowStackOverflow);
  __ bind(&stack_done);

  // Tail-call to the {code} handler.
  __ Jump(code, RelocInfo::CODE_TARGET);
}
void Builtins::Generate_CallFunction(MacroAssembler* masm,
                                     ConvertReceiverMode mode) {
  // ----------- S t a t e -------------
  //  -- r0 : the number of arguments (not including the receiver)
  //  -- r1 : the function to call (checked to be a JSFunction)
  // -----------------------------------
  __ AssertFunction(r1);

  // See ES6 section 9.2.1 [[Call]] ( thisArgument, argumentsList)
  // Check that the function is not a "classConstructor".
  Label class_constructor;
  __ ldr(r2, FieldMemOperand(r1, JSFunction::kSharedFunctionInfoOffset));
  __ ldr(r3, FieldMemOperand(r2, SharedFunctionInfo::kFlagsOffset));
  __ tst(r3, Operand(SharedFunctionInfo::IsClassConstructorBit::kMask));
  __ b(ne, &class_constructor);

  // Enter the context of the function; ToObject has to run in the function
  // context, and we also need to take the global proxy from the function
  // context in case of conversion.
  __ ldr(cp, FieldMemOperand(r1, JSFunction::kContextOffset));
  // We need to convert the receiver for non-native sloppy mode functions.
  Label done_convert;
  __ ldr(r3, FieldMemOperand(r2, SharedFunctionInfo::kFlagsOffset));
  __ tst(r3, Operand(SharedFunctionInfo::IsNativeBit::kMask |
                     SharedFunctionInfo::IsStrictBit::kMask));
  __ b(ne, &done_convert);

  {
    if (mode == ConvertReceiverMode::kNullOrUndefined) {
      // Patch receiver to global proxy.
      __ LoadGlobalProxy(r3);
    } else {
      Label convert_to_object, convert_receiver;
      __ ldr(r3, MemOperand(sp, r0, LSL, kPointerSizeLog2));
      __ JumpIfSmi(r3, &convert_to_object);
      STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
      __ CompareObjectType(r3, r4, r4, FIRST_JS_RECEIVER_TYPE);
      __ b(hs, &done_convert);
      if (mode != ConvertReceiverMode::kNotNullOrUndefined) {
        Label convert_global_proxy;
        __ JumpIfRoot(r3, RootIndex::kUndefinedValue, &convert_global_proxy);
        __ JumpIfNotRoot(r3, RootIndex::kNullValue, &convert_to_object);
        __ bind(&convert_global_proxy);
        // Patch receiver to global proxy.
        __ LoadGlobalProxy(r3);
        __ b(&convert_receiver);
      }
      __ bind(&convert_to_object);
      {
        // Convert receiver using ToObject.
        FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
        __ SmiTag(r0);
        __ Push(r0, r1);
        __ mov(r0, r3);
        __ Push(cp);
        __ Call(BUILTIN_CODE(masm->isolate(), ToObject),
                RelocInfo::CODE_TARGET);
        __ Pop(cp);
        __ mov(r3, r0);
        __ Pop(r0, r1);
        __ SmiUntag(r0);
      }
      __ ldr(r2, FieldMemOperand(r1, JSFunction::kSharedFunctionInfoOffset));
      __ bind(&convert_receiver);
    }
    __ str(r3, MemOperand(sp, r0, LSL, kPointerSizeLog2));
  }
  __ bind(&done_convert);

  __ ldrh(r2,
          FieldMemOperand(r2, SharedFunctionInfo::kFormalParameterCountOffset));
  ParameterCount actual(r0);
  ParameterCount expected(r2);
  __ InvokeFunctionCode(r1, no_reg, expected, actual, JUMP_FUNCTION);

  // The function is a "classConstructor", need to raise an exception.
  __ bind(&class_constructor);
  {
    FrameScope frame(masm, StackFrame::INTERNAL);
    __ push(r1);
    __ CallRuntime(Runtime::kThrowConstructorNonCallableError);
  }
}
void Generate_PushBoundArguments(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r0 : the number of arguments (not including the receiver)
  //  -- r1 : target (checked to be a JSBoundFunction)
  //  -- r3 : new.target (only in case of [[Construct]])
  // -----------------------------------

  // Load [[BoundArguments]] into r2 and length of that into r4.
  Label no_bound_arguments;
  __ ldr(r2, FieldMemOperand(r1, JSBoundFunction::kBoundArgumentsOffset));
  __ ldr(r4, FieldMemOperand(r2, FixedArray::kLengthOffset));
  __ SmiUntag(r4);
  __ cmp(r4, Operand(0));
  __ b(eq, &no_bound_arguments);
  {
    // Reserve stack space for the [[BoundArguments]].
    {
      Label done;
      __ sub(sp, sp, Operand(r4, LSL, kPointerSizeLog2));
      // Check the stack for overflow. We are not trying to catch
      // interruptions (i.e. debug break and preemption) here, so check the
      // "real stack limit".
      __ CompareRoot(sp, RootIndex::kRealStackLimit);
      __ b(hs, &done);
      // Restore the stack pointer.
      __ add(sp, sp, Operand(r4, LSL, kPointerSizeLog2));
      {
        FrameScope scope(masm, StackFrame::MANUAL);
        __ EnterFrame(StackFrame::INTERNAL);
        __ CallRuntime(Runtime::kThrowStackOverflow);
      }
      __ bind(&done);
    }

    Register scratch = r6;

    // Relocate arguments down the stack.
    {
      Label loop, done_loop;
      __ mov(r5, Operand(0));
      __ bind(&loop);
      __ cmp(r5, r0);
      __ b(gt, &done_loop);
      __ ldr(scratch, MemOperand(sp, r4, LSL, kPointerSizeLog2));
      __ str(scratch, MemOperand(sp, r5, LSL, kPointerSizeLog2));
      __ add(r4, r4, Operand(1));
      __ add(r5, r5, Operand(1));
      __ b(&loop);
      __ bind(&done_loop);
    }

    // Copy [[BoundArguments]] to the stack (below the arguments).
    {
      Label loop;
      __ ldr(r4, FieldMemOperand(r2, FixedArray::kLengthOffset));
      __ SmiUntag(r4);
      __ add(r2, r2, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
      __ bind(&loop);
      __ sub(r4, r4, Operand(1), SetCC);
      __ ldr(scratch, MemOperand(r2, r4, LSL, kPointerSizeLog2));
      __ str(scratch, MemOperand(sp, r0, LSL, kPointerSizeLog2));
      __ add(r0, r0, Operand(1));
      __ b(gt, &loop);
    }
  }
  __ bind(&no_bound_arguments);
}
void Builtins::Generate_CallBoundFunctionImpl(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r0 : the number of arguments (not including the receiver)
  //  -- r1 : the function to call (checked to be a JSBoundFunction)
  // -----------------------------------
  __ AssertBoundFunction(r1);

  // Patch the receiver to [[BoundThis]].
  __ ldr(r3, FieldMemOperand(r1, JSBoundFunction::kBoundThisOffset));
  __ str(r3, MemOperand(sp, r0, LSL, kPointerSizeLog2));

  // Push the [[BoundArguments]] onto the stack.
  Generate_PushBoundArguments(masm);

  // Call the [[BoundTargetFunction]] via the Call builtin.
  __ ldr(r1, FieldMemOperand(r1, JSBoundFunction::kBoundTargetFunctionOffset));
  __ Jump(BUILTIN_CODE(masm->isolate(), Call_ReceiverIsAny),
          RelocInfo::CODE_TARGET);
}
void Builtins::Generate_Call(MacroAssembler* masm, ConvertReceiverMode mode) {
  // ----------- S t a t e -------------
  //  -- r0 : the number of arguments (not including the receiver)
  //  -- r1 : the target to call (can be any Object).
  // -----------------------------------

  Label non_callable, non_function, non_smi;
  __ JumpIfSmi(r1, &non_callable);
  __ bind(&non_smi);
  __ CompareObjectType(r1, r4, r5, JS_FUNCTION_TYPE);
  __ Jump(masm->isolate()->builtins()->CallFunction(mode),
          RelocInfo::CODE_TARGET, eq);
  __ cmp(r5, Operand(JS_BOUND_FUNCTION_TYPE));
  __ Jump(BUILTIN_CODE(masm->isolate(), CallBoundFunction),
          RelocInfo::CODE_TARGET, eq);

  // Check if target has a [[Call]] internal method.
  __ ldrb(r4, FieldMemOperand(r4, Map::kBitFieldOffset));
  __ tst(r4, Operand(Map::IsCallableBit::kMask));
  __ b(eq, &non_callable);

  // Check if target is a proxy and call CallProxy external builtin.
  __ cmp(r5, Operand(JS_PROXY_TYPE));
  __ b(ne, &non_function);
  __ Jump(BUILTIN_CODE(masm->isolate(), CallProxy), RelocInfo::CODE_TARGET);

  // 2. Call to something else, which might have a [[Call]] internal method
  // (if not we raise an exception).
  __ bind(&non_function);
  // Overwrite the original receiver with the (original) target.
  __ str(r1, MemOperand(sp, r0, LSL, kPointerSizeLog2));
  // Let the "call_as_function_delegate" take care of the rest.
  __ LoadNativeContextSlot(Context::CALL_AS_FUNCTION_DELEGATE_INDEX, r1);
  __ Jump(masm->isolate()->builtins()->CallFunction(
              ConvertReceiverMode::kNotNullOrUndefined),
          RelocInfo::CODE_TARGET);

  // 3. Call to something that is not callable.
  __ bind(&non_callable);
  {
    FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
    __ Push(r1);
    __ CallRuntime(Runtime::kThrowCalledNonCallable);
  }
}
void Builtins::Generate_ConstructFunction(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r0 : the number of arguments (not including the receiver)
  //  -- r1 : the constructor to call (checked to be a JSFunction)
  //  -- r3 : the new target (checked to be a constructor)
  // -----------------------------------
  __ AssertConstructor(r1);
  __ AssertFunction(r1);

  // Calling convention for function specific ConstructStubs requires r2 to
  // contain either an AllocationSite or undefined.
  __ LoadRoot(r2, RootIndex::kUndefinedValue);

  Label call_generic_stub;

  // Jump to JSBuiltinsConstructStub or JSConstructStubGeneric.
  __ ldr(r4, FieldMemOperand(r1, JSFunction::kSharedFunctionInfoOffset));
  __ ldr(r4, FieldMemOperand(r4, SharedFunctionInfo::kFlagsOffset));
  __ tst(r4, Operand(SharedFunctionInfo::ConstructAsBuiltinBit::kMask));
  __ b(eq, &call_generic_stub);

  __ Jump(BUILTIN_CODE(masm->isolate(), JSBuiltinsConstructStub),
          RelocInfo::CODE_TARGET);

  __ bind(&call_generic_stub);
  __ Jump(BUILTIN_CODE(masm->isolate(), JSConstructStubGeneric),
          RelocInfo::CODE_TARGET);
}
void Builtins::Generate_ConstructBoundFunction(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r0 : the number of arguments (not including the receiver)
  //  -- r1 : the function to call (checked to be a JSBoundFunction)
  //  -- r3 : the new target (checked to be a constructor)
  // -----------------------------------
  __ AssertConstructor(r1);
  __ AssertBoundFunction(r1);

  // Push the [[BoundArguments]] onto the stack.
  Generate_PushBoundArguments(masm);

  // Patch new.target to [[BoundTargetFunction]] if new.target equals target.
  __ cmp(r1, r3);
  __ ldr(r3, FieldMemOperand(r1, JSBoundFunction::kBoundTargetFunctionOffset),
         eq);

  // Construct the [[BoundTargetFunction]] via the Construct builtin.
  __ ldr(r1, FieldMemOperand(r1, JSBoundFunction::kBoundTargetFunctionOffset));
  __ Jump(BUILTIN_CODE(masm->isolate(), Construct), RelocInfo::CODE_TARGET);
}
void Builtins::Generate_Construct(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r0 : the number of arguments (not including the receiver)
  //  -- r1 : the constructor to call (can be any Object)
  //  -- r3 : the new target (either the same as the constructor or
  //          the JSFunction on which new was invoked initially)
  // -----------------------------------

  // Check if target is a Smi.
  Label non_constructor, non_proxy;
  __ JumpIfSmi(r1, &non_constructor);

  // Check if target has a [[Construct]] internal method.
  __ ldr(r4, FieldMemOperand(r1, HeapObject::kMapOffset));
  __ ldrb(r2, FieldMemOperand(r4, Map::kBitFieldOffset));
  __ tst(r2, Operand(Map::IsConstructorBit::kMask));
  __ b(eq, &non_constructor);

  // Dispatch based on instance type.
  __ CompareInstanceType(r4, r5, JS_FUNCTION_TYPE);
  __ Jump(BUILTIN_CODE(masm->isolate(), ConstructFunction),
          RelocInfo::CODE_TARGET, eq);

  // Only dispatch to bound functions after checking whether they are
  // constructors.
  __ cmp(r5, Operand(JS_BOUND_FUNCTION_TYPE));
  __ Jump(BUILTIN_CODE(masm->isolate(), ConstructBoundFunction),
          RelocInfo::CODE_TARGET, eq);

  // Only dispatch to proxies after checking whether they are constructors.
  __ cmp(r5, Operand(JS_PROXY_TYPE));
  __ b(ne, &non_proxy);
  __ Jump(BUILTIN_CODE(masm->isolate(), ConstructProxy),
          RelocInfo::CODE_TARGET);

  // Called Construct on an exotic Object with a [[Construct]] internal
  // method.
  __ bind(&non_proxy);
  {
    // Overwrite the original receiver with the (original) target.
    __ str(r1, MemOperand(sp, r0, LSL, kPointerSizeLog2));
    // Let the "call_as_constructor_delegate" take care of the rest.
    __ LoadNativeContextSlot(Context::CALL_AS_CONSTRUCTOR_DELEGATE_INDEX, r1);
    __ Jump(masm->isolate()->builtins()->CallFunction(),
            RelocInfo::CODE_TARGET);
  }

  // Called Construct on an Object that doesn't have a [[Construct]] internal
  // method.
  __ bind(&non_constructor);
  __ Jump(BUILTIN_CODE(masm->isolate(), ConstructedNonConstructable),
          RelocInfo::CODE_TARGET);
}
void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r0 : actual number of arguments
  //  -- r1 : function (passed through to callee)
  //  -- r2 : expected number of arguments
  //  -- r3 : new target (passed through to callee)
  // -----------------------------------

  Label invoke, dont_adapt_arguments, stack_overflow;

  Label enough, too_few;
  __ cmp(r2, Operand(SharedFunctionInfo::kDontAdaptArgumentsSentinel));
  __ b(eq, &dont_adapt_arguments);
  __ cmp(r0, r2);
  __ b(lt, &too_few);

  Register scratch = r5;

  {  // Enough parameters: actual >= expected
    __ bind(&enough);
    EnterArgumentsAdaptorFrame(masm);
    Generate_StackOverflowCheck(masm, r2, scratch, &stack_overflow);

    // Calculate copy start address into r0 and copy end address into r4.
    __ add(r0, fp, Operand::PointerOffsetFromSmiKey(r0));
    // Adjust for return address and receiver.
    __ add(r0, r0, Operand(2 * kPointerSize));
    __ sub(r4, r0, Operand(r2, LSL, kPointerSizeLog2));

    // Copy the arguments (including the receiver) to the new stack frame.
    Label copy;
    __ bind(&copy);
    __ ldr(scratch, MemOperand(r0, 0));
    __ push(scratch);
    __ cmp(r0, r4);  // Compare before moving to next argument.
    __ sub(r0, r0, Operand(kPointerSize));
    __ b(ne, &copy);

    __ b(&invoke);
  }

  {  // Too few parameters: actual < expected
    __ bind(&too_few);
    EnterArgumentsAdaptorFrame(masm);
    Generate_StackOverflowCheck(masm, r2, scratch, &stack_overflow);

    // Calculate copy start address into r0 (copy end address is fp).
    __ add(r0, fp, Operand::PointerOffsetFromSmiKey(r0));

    // Copy the arguments (including the receiver) to the new stack frame.
    Label copy;
    __ bind(&copy);
    // Adjust load for return address and receiver.
    __ ldr(scratch, MemOperand(r0, 2 * kPointerSize));
    __ push(scratch);
    __ cmp(r0, fp);  // Compare before moving to next argument.
    __ sub(r0, r0, Operand(kPointerSize));
    __ b(ne, &copy);

    // Fill the remaining expected arguments with undefined.
    __ LoadRoot(scratch, RootIndex::kUndefinedValue);
    __ sub(r4, fp, Operand(r2, LSL, kPointerSizeLog2));
    // Adjust for frame.
    __ sub(r4, r4,
           Operand(ArgumentsAdaptorFrameConstants::kFixedFrameSizeFromFp +
                   kPointerSize));

    Label fill;
    __ bind(&fill);
    __ push(scratch);
    __ cmp(sp, r4);
    __ b(ne, &fill);
  }

  // Call the entry point.
  __ bind(&invoke);
  __ mov(r0, r2);
  // r0 : expected number of arguments
  // r1 : function (passed through to callee)
  // r3 : new target (passed through to callee)
  static_assert(kJavaScriptCallCodeStartRegister == r2, "ABI mismatch");
  __ ldr(r2, FieldMemOperand(r1, JSFunction::kCodeOffset));
  __ add(r2, r2, Operand(Code::kHeaderSize - kHeapObjectTag));
  __ Call(r2);

  // Store offset of return address for deoptimizer.
  masm->isolate()->heap()->SetArgumentsAdaptorDeoptPCOffset(masm->pc_offset());

  // Exit frame and return.
  LeaveArgumentsAdaptorFrame(masm);
  __ Jump(lr);

  // Don't adapt arguments.
  __ bind(&dont_adapt_arguments);
  static_assert(kJavaScriptCallCodeStartRegister == r2, "ABI mismatch");
  __ ldr(r2, FieldMemOperand(r1, JSFunction::kCodeOffset));
  __ add(r2, r2, Operand(Code::kHeaderSize - kHeapObjectTag));
  __ Jump(r2);

  __ bind(&stack_overflow);
  {
    FrameScope frame(masm, StackFrame::MANUAL);
    __ CallRuntime(Runtime::kThrowStackOverflow);
    __ bkpt(0);
  }
}
void Builtins::Generate_WasmCompileLazy(MacroAssembler* masm) {
  // The function index was put in a register by the jump table trampoline.
  // Convert to Smi for the runtime call.
  __ SmiTag(kWasmCompileLazyFuncIndexRegister,
            kWasmCompileLazyFuncIndexRegister);
  {
    HardAbortScope hard_abort(masm);  // Avoid calls to Abort.
    FrameAndConstantPoolScope scope(masm, StackFrame::WASM_COMPILE_LAZY);

    // Save all parameter registers. They might be overwritten in the runtime
    // call below. We don't have any callee-saved registers in wasm, so no
    // need to store anything else.
    constexpr RegList gp_regs = Register::ListOf<r0, r1, r2, r3>();
    constexpr DwVfpRegister lowest_fp_reg = d0;
    constexpr DwVfpRegister highest_fp_reg = d7;

    __ stm(db_w, sp, gp_regs);
    __ vstm(db_w, sp, lowest_fp_reg, highest_fp_reg);

    // Pass instance and function index as explicit arguments to the runtime
    // function.
    __ push(kWasmInstanceRegister);
    __ push(kWasmCompileLazyFuncIndexRegister);
    // Load the correct CEntry builtin from the instance object.
    __ ldr(r2, FieldMemOperand(kWasmInstanceRegister,
                               WasmInstanceObject::kCEntryStubOffset));
    // Initialize the JavaScript context with 0. CEntry will use it to set the
    // current context on the isolate.
    __ Move(cp, Smi::zero());
    __ CallRuntimeWithCEntry(Runtime::kWasmCompileLazy, r2);
    // The entrypoint address is the return value.
    __ mov(r8, kReturnRegister0);

    // Restore registers.
    __ vldm(ia_w, sp, lowest_fp_reg, highest_fp_reg);
    __ ldm(ia_w, sp, gp_regs);
  }
  // Finally, jump to the entrypoint.
  __ Jump(r8);
}
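// Lazy compilation: wasm code space initially routes calls through jump-table
// entries that land here with the function index in
// kWasmCompileLazyFuncIndexRegister. All wasm parameter registers are saved
// around the runtime call because the runtime may clobber them; the call
// returns the compiled function's entrypoint, which is re-entered via r8 so
// the original call proceeds as if the code had always been compiled.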
void Builtins::Generate_CEntry(MacroAssembler* masm, int result_size,
                               SaveFPRegsMode save_doubles, ArgvMode argv_mode,
                               bool builtin_exit_frame) {
  // Called from JavaScript; parameters are on stack as if calling JS function.
  // r0: number of arguments including receiver
  // r1: pointer to builtin function
  // fp: frame pointer  (restored after C call)
  // sp: stack pointer  (restored as callee's sp after C call)
  // cp: current context  (C callee-saved)
  //
  // If argv_mode == kArgvInRegister:
  // r2: pointer to the first argument

  __ mov(r5, Operand(r1));

  if (argv_mode == kArgvInRegister) {
    // Move argv into the correct register.
    __ mov(r1, Operand(r2));
  } else {
    // Compute the argv pointer in a callee-saved register.
    __ add(r1, sp, Operand(r0, LSL, kPointerSizeLog2));
    __ sub(r1, r1, Operand(kPointerSize));
  }

  // Enter the exit frame that transitions from JavaScript to C++.
  FrameScope scope(masm, StackFrame::MANUAL);
  __ EnterExitFrame(
      save_doubles == kSaveFPRegs, 0,
      builtin_exit_frame ? StackFrame::BUILTIN_EXIT : StackFrame::EXIT);

  // Store a copy of argc in a callee-saved register for later.
  __ mov(r4, Operand(r0));

#if V8_HOST_ARCH_ARM
  int frame_alignment = MacroAssembler::ActivationFrameAlignment();
  int frame_alignment_mask = frame_alignment - 1;
  if (FLAG_debug_code) {
    if (frame_alignment > kPointerSize) {
      Label alignment_as_expected;
      DCHECK(base::bits::IsPowerOfTwo(frame_alignment));
      __ tst(sp, Operand(frame_alignment_mask));
      __ b(eq, &alignment_as_expected);
      // Don't use Check here, as it will call Runtime_Abort re-entering here.
      __ stop("Unexpected alignment");
      __ bind(&alignment_as_expected);
    }
  }
#endif

  // Call C built-in: r0 = argc, r1 = argv, r2 = isolate.
  __ Move(r2, ExternalReference::isolate_address(masm->isolate()));

  // Compute the return address in lr to return to after the jump below. Pc
  // is already at '+ 8' from the current instruction, but return is after
  // three instructions, so add another 4 to pc to get the return address.
  {
    // Prevent literal pool emission before return address.
    Assembler::BlockConstPoolScope block_const_pool(masm);
    __ add(lr, pc, Operand(4));
    __ str(lr, MemOperand(sp));
    __ Call(r5);
  }

  // Check result for exception sentinel.
  Label exception_returned;
  __ CompareRoot(r0, RootIndex::kException);
  __ b(eq, &exception_returned);

  // Check that there is no pending exception, otherwise we should have
  // returned the exception sentinel.
  if (FLAG_debug_code) {
    Label okay;
    ExternalReference pending_exception_address = ExternalReference::Create(
        IsolateAddressId::kPendingExceptionAddress, masm->isolate());
    __ Move(r3, pending_exception_address);
    __ ldr(r3, MemOperand(r3));
    __ CompareRoot(r3, RootIndex::kTheHoleValue);
    // Cannot use Check here, as it attempts to generate a call into the
    // runtime.
    __ b(eq, &okay);
    __ stop("Unexpected pending exception");
    __ bind(&okay);
  }

  // Exit the C frame and return. If argv was passed in a register we don't
  // want to pop the arguments, so argc is no_reg; otherwise the callee-saved
  // register r4 still holds argc.
  Register argc = argv_mode == kArgvInRegister ? no_reg : r4;
  __ LeaveExitFrame(save_doubles == kSaveFPRegs, argc);
  __ mov(pc, lr);

  // Handling of exception.
  __ bind(&exception_returned);

  ExternalReference pending_handler_context_address = ExternalReference::Create(
      IsolateAddressId::kPendingHandlerContextAddress, masm->isolate());
  ExternalReference pending_handler_entrypoint_address =
      ExternalReference::Create(
          IsolateAddressId::kPendingHandlerEntrypointAddress, masm->isolate());
  ExternalReference pending_handler_fp_address = ExternalReference::Create(
      IsolateAddressId::kPendingHandlerFPAddress, masm->isolate());
  ExternalReference pending_handler_sp_address = ExternalReference::Create(
      IsolateAddressId::kPendingHandlerSPAddress, masm->isolate());

  // Ask the runtime for help to determine the handler. This will set r0 to
  // contain the current pending exception, don't clobber it.
  ExternalReference find_handler =
      ExternalReference::Create(Runtime::kUnwindAndFindExceptionHandler);
  {
    FrameScope scope(masm, StackFrame::MANUAL);
    __ PrepareCallCFunction(3, 0);
    __ mov(r0, Operand(0));
    __ mov(r1, Operand(0));
    __ Move(r2, ExternalReference::isolate_address(masm->isolate()));
    __ CallCFunction(find_handler, 3);
  }

  // Retrieve the handler context, SP and FP.
  __ Move(cp, pending_handler_context_address);
  __ ldr(cp, MemOperand(cp));
  __ Move(sp, pending_handler_sp_address);
  __ ldr(sp, MemOperand(sp));
  __ Move(fp, pending_handler_fp_address);
  __ ldr(fp, MemOperand(fp));

  // If the handler is a JS frame, restore the context to the frame. Note
  // that the context will be set to (cp == 0) for non-JS frames.
  __ cmp(cp, Operand(0));
  __ str(cp, MemOperand(fp, StandardFrameConstants::kContextOffset), ne);

  // Reset the masking register.
  __ ResetSpeculationPoisonRegister();

  // Compute the handler entry address and jump to it.
  ConstantPoolUnavailableScope constant_pool_unavailable(masm);
  __ Move(r1, pending_handler_entrypoint_address);
  __ ldr(r1, MemOperand(r1));
  __ Jump(r1);
}
void Builtins::Generate_DoubleToI(MacroAssembler* masm) {
  Label negate, done;

  HardAbortScope hard_abort(masm);  // Avoid calls to Abort.
  UseScratchRegisterScope temps(masm);
  Register result_reg = r7;
  Register double_low = GetRegisterThatIsNotOneOf(result_reg);
  Register double_high = GetRegisterThatIsNotOneOf(result_reg, double_low);
  LowDwVfpRegister double_scratch = temps.AcquireLowD();

  // Save the old values from these temporary registers on the stack.
  __ Push(result_reg, double_high, double_low);

  // Account for saved regs.
  const int kArgumentOffset = 3 * kPointerSize;

  MemOperand input_operand(sp, kArgumentOffset);
  MemOperand result_operand = input_operand;

  // Load double input.
  __ vldr(double_scratch, input_operand);
  __ vmov(double_low, double_high, double_scratch);
  // Try to convert with a FPU convert instruction. This handles all
  // non-saturating cases.
  __ TryInlineTruncateDoubleToI(result_reg, double_scratch, &done);

  Register scratch = temps.Acquire();
  __ Ubfx(scratch, double_high, HeapNumber::kExponentShift,
          HeapNumber::kExponentBits);
  // Load scratch with exponent - 1. This is faster than loading with
  // exponent because Bias + 1 = 1024 which is an *ARM* immediate value.
  STATIC_ASSERT(HeapNumber::kExponentBias + 1 == 1024);
  __ sub(scratch, scratch, Operand(HeapNumber::kExponentBias + 1));
  // Compare exponent with 84 (compare exponent - 1 with 83). If the exponent
  // is at least 84, the 32 less significant bits of the integer are all 0s,
  // so the result is 0.
  __ cmp(scratch, Operand(83));
  __ mov(result_reg, Operand::Zero(), LeaveCC, ge);
  __ b(ge, &done);

  // If we reach this code, 30 <= exponent <= 83; TryInlineTruncateDoubleToI
  // above will have truncated any double with a smaller exponent.
  if (masm->emit_debug_code()) {
    // Scratch is exponent - 1.
    __ cmp(scratch, Operand(30 - 1));
    __ Check(ge, AbortReason::kUnexpectedValue);
  }

  // Load scratch with 52 - exponent (load with 51 - (exponent - 1)).
  __ rsb(scratch, scratch, Operand(51), SetCC);

  // 52 <= exponent <= 83, shift only double_low.
  // On entry, scratch contains: 52 - exponent.
  __ rsb(scratch, scratch, Operand::Zero(), LeaveCC, ls);
  __ mov(result_reg, Operand(double_low, LSL, scratch), LeaveCC, ls);
  __ b(ls, &negate);

  // 30 <= exponent <= 51, shift double_low and double_high to generate the
  // result.
  __ mov(double_low, Operand(double_low, LSR, scratch));
  // Scratch contains: 52 - exponent; we need: exponent - 20, which is
  // 32 - scratch.
  __ rsb(scratch, scratch, Operand(32));
  __ Ubfx(result_reg, double_high, 0, HeapNumber::kMantissaBitsInTopWord);
  // Set the implicit 1 before the mantissa part in double_high.
  __ orr(result_reg, result_reg,
         Operand(1 << HeapNumber::kMantissaBitsInTopWord));
  __ orr(result_reg, double_low, Operand(result_reg, LSL, scratch));

  __ bind(&negate);
  // If input was positive, double_high ASR 31 equals 0 and double_high LSR 31
  // equals 0, so the result is unchanged. If the input was negative, we have
  // to negate: (result eor 0xFFFFFFFF) + 1 = 0 - result.
  __ eor(result_reg, result_reg, Operand(double_high, ASR, 31));
  __ add(result_reg, result_reg, Operand(double_high, LSR, 31));

  __ bind(&done);
  __ str(result_reg, result_operand);

  // Restore registers corrupted in this routine and return.
  __ Pop(result_reg, double_high, double_low);
  __ Ret();
}
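// Summary of the saturating truncation above, in terms of the unbiased
// exponent (scratch holds exponent - 1):
//   exponent <  30 : fully handled by TryInlineTruncateDoubleToI.
//   exponent 30..51: mantissa bits from both double_low and double_high are
//                    combined into the result.
//   exponent 52..83: only double_low contributes; it is shifted left.
//   exponent >= 84 : the low 32 bits of the value are all zero, result is 0.
// The closing eor/add pair conditionally negates the result using the sign
// bit of double_high and the identity -x == (x ^ ~0) + 1.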
void Builtins::Generate_MathPowInternal(MacroAssembler* masm) {
  const LowDwVfpRegister double_base = d0;
  const LowDwVfpRegister double_exponent = d1;
  const LowDwVfpRegister double_result = d2;
  const LowDwVfpRegister double_scratch = d3;
  const SwVfpRegister single_scratch = s6;
  const Register integer_exponent = r4;
  const Register scratch = r5;

  Label call_runtime, done, int_exponent;

  // Detect integer exponents stored as doubles.
  __ TryDoubleToInt32Exact(integer_exponent, double_exponent, double_scratch);
  __ b(eq, &int_exponent);

  __ push(lr);
  {
    AllowExternalCallThatCantCauseGC scope(masm);
    __ PrepareCallCFunction(0, 2);
    __ MovToFloatParameters(double_base, double_exponent);
    __ CallCFunction(ExternalReference::power_double_double_function(), 0, 2);
  }
  __ pop(lr);
  __ MovFromFloatResult(double_result);
  __ b(&done);

  // Calculate power with integer exponent.
  __ bind(&int_exponent);

  __ vmov(double_scratch, double_base);  // Back up base.
  __ vmov(double_result, Double(1.0), scratch);

  // Get absolute value of exponent.
  __ cmp(integer_exponent, Operand::Zero());
  __ mov(scratch, integer_exponent);
  __ rsb(scratch, integer_exponent, Operand::Zero(), LeaveCC, mi);

  Label while_true;
  __ bind(&while_true);
  __ mov(scratch, Operand(scratch, LSR, 1), SetCC);
  __ vmul(double_result, double_result, double_scratch, cs);
  __ vmul(double_scratch, double_scratch, double_scratch, ne);
  __ b(ne, &while_true);

  __ cmp(integer_exponent, Operand::Zero());
  __ b(ge, &done);
  __ vmov(double_scratch, Double(1.0), scratch);
  __ vdiv(double_result, double_scratch, double_result);
  // Test whether result is zero. Bail out to check for subnormal result.
  // Due to subnormals, x^-y == (1/x)^y does not hold in all cases.
  __ VFPCompareAndSetFlags(double_result, 0.0);
  __ b(ne, &done);
  // double_exponent may not contain the exponent value if the input was a
  // smi. We set it with the exponent value before bailing out.
  __ vmov(single_scratch, integer_exponent);
  __ vcvt_f64_s32(double_exponent, single_scratch);

  // Returning or bailing out.
  __ push(lr);
  {
    AllowExternalCallThatCantCauseGC scope(masm);
    __ PrepareCallCFunction(0, 2);
    __ MovToFloatParameters(double_base, double_exponent);
    __ CallCFunction(ExternalReference::power_double_double_function(), 0, 2);
  }
  __ pop(lr);
  __ MovFromFloatResult(double_result);

  __ bind(&done);
  __ Ret();
}
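// The integer-exponent loop above is binary exponentiation
// (square-and-multiply): each iteration shifts the absolute exponent right,
// multiplies the accumulated result by the current base when the shifted-out
// bit was set (cs), and squares the base while bits remain (ne), so e.g.
// x^13 needs only a handful of multiplies instead of twelve. A negative
// exponent is handled by taking the reciprocal at the end, falling back to
// the C runtime when that result underflows to zero.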
void GenerateInternalArrayConstructorCase(MacroAssembler* masm,
                                          ElementsKind kind) {
  // Load undefined into the allocation site parameter as required by
  // ArrayNArgumentsConstructor.
  __ LoadRoot(kJavaScriptCallExtraArg1Register, RootIndex::kUndefinedValue);

  __ cmp(r0, Operand(1));

  __ Jump(CodeFactory::InternalArrayNoArgumentConstructor(masm->isolate(), kind)
              .code(),
          RelocInfo::CODE_TARGET, lo);

  Handle<Code> code = BUILTIN_CODE(masm->isolate(), ArrayNArgumentsConstructor);
  __ Jump(code, RelocInfo::CODE_TARGET, hi);

  if (IsFastPackedElementsKind(kind)) {
    // We might need to create a holey array; look at the first argument.
    __ ldr(r3, MemOperand(sp, 0));
    __ cmp(r3, Operand::Zero());

    __ Jump(CodeFactory::InternalArraySingleArgumentConstructor(
                masm->isolate(), GetHoleyElementsKind(kind))
                .code(),
            RelocInfo::CODE_TARGET, ne);
  }

  __ Jump(
      CodeFactory::InternalArraySingleArgumentConstructor(masm->isolate(), kind)
          .code(),
      RelocInfo::CODE_TARGET);
}
void Builtins::Generate_InternalArrayConstructorImpl(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r0 : argc
  //  -- r1 : constructor
  //  -- sp[0] : return address
  //  -- sp[4] : last argument
  // -----------------------------------

  if (FLAG_debug_code) {
    // The array construct code is only set for the global and natives
    // builtin Array functions which always have maps.

    // Initial map for the builtin Array function should be a map.
    __ ldr(r3, FieldMemOperand(r1, JSFunction::kPrototypeOrInitialMapOffset));
    // Will both indicate a nullptr and a Smi.
    __ tst(r3, Operand(kSmiTagMask));
    __ Assert(ne, AbortReason::kUnexpectedInitialMapForArrayFunction);
    __ CompareObjectType(r3, r3, r4, MAP_TYPE);
    __ Assert(eq, AbortReason::kUnexpectedInitialMapForArrayFunction);
  }

  // Figure out the right elements kind.
  __ ldr(r3, FieldMemOperand(r1, JSFunction::kPrototypeOrInitialMapOffset));
  // Load the map's "bit field 2" into |r3|.
  __ ldr(r3, FieldMemOperand(r3, Map::kBitField2Offset));
  // Retrieve elements_kind from bit field 2.
  __ DecodeField<Map::ElementsKindBits>(r3);

  if (FLAG_debug_code) {
    Label done;
    __ cmp(r3, Operand(PACKED_ELEMENTS));
    __ b(eq, &done);
    __ cmp(r3, Operand(HOLEY_ELEMENTS));
    __ Assert(
        eq,
        AbortReason::kInvalidElementsKindForInternalArrayOrInternalPackedArray);
    __ bind(&done);
  }

  Label fast_elements_case;
  __ cmp(r3, Operand(PACKED_ELEMENTS));
  __ b(eq, &fast_elements_case);
  GenerateInternalArrayConstructorCase(masm, HOLEY_ELEMENTS);

  __ bind(&fast_elements_case);
  GenerateInternalArrayConstructorCase(masm, PACKED_ELEMENTS);
}

#undef __

}  // namespace internal
}  // namespace v8

#endif  // V8_TARGET_ARCH_ARM