#if V8_TARGET_ARCH_X64

#include "src/base/adapters.h"
#include "src/code-factory.h"
#include "src/counters.h"
#include "src/deoptimizer.h"
#include "src/frame-constants.h"
#include "src/frames.h"
#include "src/objects-inl.h"
#include "src/objects/debug-objects.h"
#include "src/objects/js-generator.h"
#include "src/objects/smi.h"
#include "src/register-configuration.h"
#include "src/wasm/wasm-linkage.h"
#include "src/wasm/wasm-objects.h"

namespace v8 {
namespace internal {

#define __ ACCESS_MASM(masm)

void Builtins::Generate_Adaptor(MacroAssembler* masm, Address address,
                                ExitFrameType exit_frame_type) {
  __ LoadAddress(kJavaScriptCallExtraArg1Register,
                 ExternalReference::Create(address));
  if (exit_frame_type == BUILTIN_EXIT) {
    __ Jump(BUILTIN_CODE(masm->isolate(), AdaptorWithBuiltinExitFrame),
            RelocInfo::CODE_TARGET);
  } else {
    DCHECK(exit_frame_type == EXIT);
    __ Jump(BUILTIN_CODE(masm->isolate(), AdaptorWithExitFrame),
            RelocInfo::CODE_TARGET);
  }
}
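
// Tail-calls the Code object returned by a runtime call (for example the
// lazy-compile or optimization runtime functions). Judging by the JS calling
// convention asserted below, rax (argument count), rdx (new target) and rdi
// (target function) are expected on entry and preserved for the callee.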
static void GenerateTailCallToReturnedCode(MacroAssembler* masm,
                                           Runtime::FunctionId function_id) {
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    // The target function is passed as the single runtime argument.
    __ CallRuntime(function_id, 1);
    // Restore the (Smi-tagged) argument count.
    __ SmiUntag(rax, rax);
  }
  static_assert(kJavaScriptCallCodeStartRegister == rcx, "ABI mismatch");
  __ leap(rcx, FieldOperand(rcx, Code::kHeaderSize));
  __ jmp(rcx);
}
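
// Construct-stub helper for builtin constructors. On entry, following the
// convention of the construct stubs in this file: rax holds the untagged
// argument count, rdi the constructor function, rdx the new target, and the
// caller's arguments sit above the return address on the stack.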
void Generate_JSBuiltinsConstructStubHelper(MacroAssembler* masm) {
  // Enter a construct frame.
  {
    FrameScope scope(masm, StackFrame::CONSTRUCT);
    __ PushRoot(RootIndex::kTheHoleValue);

    // Set up pointer to last argument.
    __ leap(rbx, Operand(rbp, StandardFrameConstants::kCallerSPOffset));

    // Copy arguments and receiver to the expression stack.
    Label loop, entry;
    __ movp(rcx, rax);
    __ jmp(&entry, Label::kNear);
    __ bind(&loop);
    __ Push(Operand(rbx, rcx, times_pointer_size, 0));
    __ bind(&entry);
    __ decp(rcx);
    __ j(greater_equal, &loop, Label::kNear);

    // Call the function.
    ParameterCount actual(rax);
    __ InvokeFunction(rdi, rdx, actual, CALL_FUNCTION);

    // Restore context and arguments count from the frame.
    __ movp(rsi, Operand(rbp, ConstructFrameConstants::kContextOffset));
    __ movp(rbx, Operand(rbp, ConstructFrameConstants::kLengthOffset));
    // Leave the construct frame.
  }
  // Remove caller arguments from the stack and return.
  __ PopReturnAddressTo(rcx);
  SmiIndex index = masm->SmiToIndex(rbx, rbx, kPointerSizeLog2);
  __ leap(rsp, Operand(rsp, index.reg, index.scale, 1 * kPointerSize));
  __ PushReturnAddressFrom(rcx);
  __ ret(0);
}
void Generate_StackOverflowCheck(
    MacroAssembler* masm, Register num_args, Register scratch,
    Label* stack_overflow,
    Label::Distance stack_overflow_distance = Label::kFar) {
  // Check the stack for overflow. We are not trying to catch interruptions
  // (e.g. debug break and preemption) here, so the "real stack limit" is
  // checked.
  __ LoadRoot(kScratchRegister, RootIndex::kRealStackLimit);
  __ movp(scratch, rsp);
  // Make scratch the space we have left. The stack might already be
  // overflowed here, which will cause scratch to become negative.
  __ subp(scratch, kScratchRegister);
  __ sarp(scratch, Immediate(kPointerSizeLog2));
  // Check if the arguments will overflow the stack.
  __ cmpp(scratch, num_args);
  // Signed comparison.
  __ j(less_equal, stack_overflow, stack_overflow_distance);
}
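
// The generic construct stub for ES5 constructor functions and ES6 class
// constructors. Same register state as the builtins construct stub above;
// derived class constructors skip the implicit-receiver allocation and get
// the hole value as receiver instead.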
void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) {
  // Enter a construct frame.
  {
    FrameScope scope(masm, StackFrame::CONSTRUCT);
    Label post_instantiation_deopt_entry, not_create_implicit_receiver;

    __ PushRoot(RootIndex::kTheHoleValue);

    // Decide whether to create an implicit receiver.
    __ movp(rbx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
    __ testl(FieldOperand(rbx, SharedFunctionInfo::kFlagsOffset),
             Immediate(SharedFunctionInfo::IsDerivedConstructorBit::kMask));
    __ j(not_zero, &not_create_implicit_receiver, Label::kNear);

    // If not a derived class constructor: Allocate the new receiver object.
    __ IncrementCounter(masm->isolate()->counters()->constructed_objects(), 1);
    __ Call(BUILTIN_CODE(masm->isolate(), FastNewObject),
            RelocInfo::CODE_TARGET);
    __ jmp(&post_instantiation_deopt_entry, Label::kNear);

    // Else: use the hole value as receiver for the constructor call.
    __ bind(&not_create_implicit_receiver);
    __ LoadRoot(rax, RootIndex::kTheHoleValue);

    // Deoptimizer re-enters here.
    masm->isolate()->heap()->SetConstructStubCreateDeoptPCOffset(
        masm->pc_offset());
    __ bind(&post_instantiation_deopt_entry);

    // Restore constructor function and argument count.
    __ movp(rdi, Operand(rbp, ConstructFrameConstants::kConstructorOffset));
    __ SmiUntag(rax, Operand(rbp, ConstructFrameConstants::kLengthOffset));

    // Set up pointer to last argument.
    __ leap(rbx, Operand(rbp, StandardFrameConstants::kCallerSPOffset));

    // Check if we have enough stack space to push all arguments.
    Label enough_stack_space, stack_overflow;
    Generate_StackOverflowCheck(masm, rax, rcx, &stack_overflow, Label::kNear);
    __ jmp(&enough_stack_space, Label::kNear);

    __ bind(&stack_overflow);
    // Restore context from the frame.
    __ movp(rsi, Operand(rbp, ConstructFrameConstants::kContextOffset));
    __ CallRuntime(Runtime::kThrowStackOverflow);
    __ int3();  // This should be unreachable.

    __ bind(&enough_stack_space);

    // Copy arguments and receiver to the expression stack.
    Label loop, entry;
    __ movp(rcx, rax);
    __ jmp(&entry, Label::kNear);
    __ bind(&loop);
    __ Push(Operand(rbx, rcx, times_pointer_size, 0));
    __ bind(&entry);
    __ decp(rcx);
    __ j(greater_equal, &loop, Label::kNear);

    // Call the function.
    ParameterCount actual(rax);
    __ InvokeFunction(rdi, rdx, actual, CALL_FUNCTION);

    // Store offset of return address for the deoptimizer.
    masm->isolate()->heap()->SetConstructStubInvokeDeoptPCOffset(
        masm->pc_offset());

    // Restore context from the frame.
    __ movp(rsi, Operand(rbp, ConstructFrameConstants::kContextOffset));

    // If the result is an object (in the ECMA sense), use it as the receiver;
    // otherwise fall back to the implicit receiver stored on the stack.
    Label use_receiver, do_throw, leave_frame;

    // If the result is undefined, use the implicit receiver.
    __ JumpIfRoot(rax, RootIndex::kUndefinedValue, &use_receiver, Label::kNear);

    // If the result is a smi, it is *not* an object in the ECMA sense.
    __ JumpIfSmi(rax, &use_receiver, Label::kNear);

    // If the type of the result is below FIRST_JS_RECEIVER_TYPE, it is not an
    // object in the ECMA sense.
    STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
    __ CmpObjectType(rax, FIRST_JS_RECEIVER_TYPE, rcx);
    __ j(above_equal, &leave_frame, Label::kNear);
    __ jmp(&use_receiver, Label::kNear);

    __ bind(&do_throw);
    __ CallRuntime(Runtime::kThrowConstructorReturnedNonObject);

    // Throw away the result of the constructor invocation and use the
    // on-stack receiver as the result.
    __ bind(&use_receiver);
    __ movp(rax, Operand(rsp, 0 * kPointerSize));
    __ JumpIfRoot(rax, RootIndex::kTheHoleValue, &do_throw, Label::kNear);

    __ bind(&leave_frame);
    // Restore the arguments count and leave the construct frame.
    __ movp(rbx, Operand(rbp, ConstructFrameConstants::kLengthOffset));
  }
  // Remove caller arguments from the stack and return.
  __ PopReturnAddressTo(rcx);
  SmiIndex index = masm->SmiToIndex(rbx, rbx, kPointerSizeLog2);
  __ leap(rsp, Operand(rsp, index.reg, index.scale, 1 * kPointerSize));
  __ PushReturnAddressFrom(rcx);
  __ ret(0);
}
void Builtins::Generate_JSBuiltinsConstructStub(MacroAssembler* masm) {
  Generate_JSBuiltinsConstructStubHelper(masm);
}
void Builtins::Generate_ConstructedNonConstructable(MacroAssembler* masm) {
  FrameScope scope(masm, StackFrame::INTERNAL);
  __ Push(rdi);
  __ CallRuntime(Runtime::kThrowConstructedNonConstructable);
}
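
// Common code for JSEntryTrampoline and JSConstructEntryTrampoline: entered
// from C++, it sets up an internal frame, copies the C argv onto the JS
// stack, and invokes either the Call or the Construct builtin depending on
// is_construct.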
static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm,
                                             bool is_construct) {
  {
// Platform-specific argument handling. After this, rax holds the argument
// count, rbx the argument array, rdi the function, rsi the context, and rdx
// the new target.
#ifdef _WIN64
    // Enter an internal frame.
    FrameScope scope(masm, StackFrame::INTERNAL);

    // Set up the context (we need to use the caller context from the
    // isolate).
    ExternalReference context_address = ExternalReference::Create(
        IsolateAddressId::kContextAddress, masm->isolate());
    __ movp(rsi, masm->ExternalReferenceAsOperand(context_address));

    // Load the previous frame pointer to access the C arguments on the stack.
    __ movp(kScratchRegister, Operand(rbp, 0));
    __ movp(rbx, Operand(kScratchRegister, EntryFrameConstants::kArgvOffset));
#else   // _WIN64
    // Enter an internal frame.
    FrameScope scope(masm, StackFrame::INTERNAL);

    // Set up the context (we need to use the caller context from the
    // isolate).
    ExternalReference context_address = ExternalReference::Create(
        IsolateAddressId::kContextAddress, masm->isolate());
    __ movp(rsi, masm->ExternalReferenceAsOperand(context_address));
#endif  // _WIN64

    // Check if we have enough stack space to push all arguments.
    Label enough_stack_space, stack_overflow;
    Generate_StackOverflowCheck(masm, rax, rcx, &stack_overflow, Label::kNear);
    __ jmp(&enough_stack_space, Label::kNear);

    __ bind(&stack_overflow);
    __ CallRuntime(Runtime::kThrowStackOverflow);

    __ bind(&enough_stack_space);

    // Copy arguments to the stack in a loop. Register rbx points to the array
    // of pointers to the arguments, and rcx is the loop index.
    Label loop, entry;
    __ Set(rcx, 0);  // Set loop variable to 0.
    __ jmp(&entry, Label::kNear);
    __ bind(&loop);
    __ movp(kScratchRegister, Operand(rbx, rcx, times_pointer_size, 0));
    __ Push(Operand(kScratchRegister, 0));
    __ addp(rcx, Immediate(1));
    __ bind(&entry);
    __ cmpp(rcx, rax);
    __ j(not_equal, &loop, Label::kNear);

    // Invoke the builtin code.
    Handle<Code> builtin = is_construct
                               ? BUILTIN_CODE(masm->isolate(), Construct)
                               : masm->isolate()->builtins()->Call();
    __ Call(builtin, RelocInfo::CODE_TARGET);

    // Exit the internal frame.
  }

  __ ret(0);
}
void Builtins::Generate_JSEntryTrampoline(MacroAssembler* masm) {
  Generate_JSEntryTrampolineHelper(masm, false);
}

void Builtins::Generate_JSConstructEntryTrampoline(MacroAssembler* masm) {
  Generate_JSEntryTrampolineHelper(masm, true);
}
static void GetSharedFunctionInfoBytecode(MacroAssembler* masm,
                                          Register sfi_data,
                                          Register scratch1) {
  Label done;

  __ CmpObjectType(sfi_data, INTERPRETER_DATA_TYPE, scratch1);
  __ j(not_equal, &done, Label::kNear);
  __ movp(sfi_data,
          FieldOperand(sfi_data, InterpreterData::kBytecodeArrayOffset));

  __ bind(&done);
}
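
// Resumes a suspended generator. On entry (per the asserts below), rax holds
// the value to pass to the generator and rdx the JSGeneratorObject; the
// generator's saved parameters are pushed back onto the stack before jumping
// into the generator function's code with rdx as new.target.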
void Builtins::Generate_ResumeGeneratorTrampoline(MacroAssembler* masm) {
  __ AssertGeneratorObject(rdx);

  // Store input value into generator object.
  __ movp(FieldOperand(rdx, JSGeneratorObject::kInputOrDebugPosOffset), rax);
  __ RecordWriteField(rdx, JSGeneratorObject::kInputOrDebugPosOffset, rax, rcx,
                      kDontSaveFPRegs);

  // Load suspended function and context.
  __ movp(rdi, FieldOperand(rdx, JSGeneratorObject::kFunctionOffset));
  __ movp(rsi, FieldOperand(rdi, JSFunction::kContextOffset));

  // Flood function if we are stepping.
  Label prepare_step_in_if_stepping, prepare_step_in_suspended_generator;
  Label stepping_prepared;
  ExternalReference debug_hook =
      ExternalReference::debug_hook_on_function_call_address(masm->isolate());
  Operand debug_hook_operand = masm->ExternalReferenceAsOperand(debug_hook);
  __ cmpb(debug_hook_operand, Immediate(0));
  __ j(not_equal, &prepare_step_in_if_stepping);

  // Flood function if we need to continue stepping in the suspended generator.
  ExternalReference debug_suspended_generator =
      ExternalReference::debug_suspended_generator_address(masm->isolate());
  Operand debug_suspended_generator_operand =
      masm->ExternalReferenceAsOperand(debug_suspended_generator);
  __ cmpp(rdx, debug_suspended_generator_operand);
  __ j(equal, &prepare_step_in_suspended_generator);
  __ bind(&stepping_prepared);

  // Check the stack for overflow; check the "real stack limit".
  Label stack_overflow;
  __ CompareRoot(rsp, RootIndex::kRealStackLimit);
  __ j(below, &stack_overflow);

  // Pop return address.
  __ PopReturnAddressTo(rax);

  // Push receiver.
  __ Push(FieldOperand(rdx, JSGeneratorObject::kReceiverOffset));

  // Copy the function arguments from the generator object's register file.
  __ movp(rcx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
  __ movzxwq(
      rcx, FieldOperand(rcx, SharedFunctionInfo::kFormalParameterCountOffset));
  __ movp(rbx,
          FieldOperand(rdx, JSGeneratorObject::kParametersAndRegistersOffset));

  {
    Label done_loop, loop;
    __ Set(r9, 0);

    __ bind(&loop);
    __ cmpl(r9, rcx);
    __ j(greater_equal, &done_loop, Label::kNear);
    __ Push(FieldOperand(rbx, r9, times_pointer_size, FixedArray::kHeaderSize));
    __ addl(r9, Immediate(1));
    __ jmp(&loop);

    __ bind(&done_loop);
  }

  // Underlying function needs to have bytecode available.
  if (FLAG_debug_code) {
    __ movp(rcx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
    __ movp(rcx, FieldOperand(rcx, SharedFunctionInfo::kFunctionDataOffset));
    GetSharedFunctionInfoBytecode(masm, rcx, kScratchRegister);
    __ CmpObjectType(rcx, BYTECODE_ARRAY_TYPE, rcx);
    __ Assert(equal, AbortReason::kMissingBytecodeArray);
  }

  // Resume (Ignition/TurboFan) generator object.
  {
    __ PushReturnAddressFrom(rax);
    __ movp(rax, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
    __ movzxwq(rax, FieldOperand(
                        rax, SharedFunctionInfo::kFormalParameterCountOffset));
    // We abuse new.target both to indicate that this is a resume call and to
    // pass in the generator object. In ordinary calls, new.target is always
    // undefined because generator functions are non-constructable.
    static_assert(kJavaScriptCallCodeStartRegister == rcx, "ABI mismatch");
    __ movp(rcx, FieldOperand(rdi, JSFunction::kCodeOffset));
    __ addp(rcx, Immediate(Code::kHeaderSize - kHeapObjectTag));
    __ jmp(rcx);
  }

  __ bind(&prepare_step_in_if_stepping);
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ Push(rdx);
    __ Push(rdi);
    // Push hole as receiver since we do not use it for stepping.
    __ PushRoot(RootIndex::kTheHoleValue);
    __ CallRuntime(Runtime::kDebugOnFunctionCall);
    __ Pop(rdx);
    __ movp(rdi, FieldOperand(rdx, JSGeneratorObject::kFunctionOffset));
  }
  __ jmp(&stepping_prepared);

  __ bind(&prepare_step_in_suspended_generator);
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ Push(rdx);
    __ CallRuntime(Runtime::kDebugPrepareStepInSuspendedGenerator);
    __ Pop(rdx);
    __ movp(rdi, FieldOperand(rdx, JSGeneratorObject::kFunctionOffset));
  }
  __ jmp(&stepping_prepared);

  __ bind(&stack_overflow);
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ CallRuntime(Runtime::kThrowStackOverflow);
    __ int3();  // This should be unreachable.
  }
}
static void ReplaceClosureCodeWithOptimizedCode(
    MacroAssembler* masm, Register optimized_code, Register closure,
    Register scratch1, Register scratch2, Register scratch3) {
  // Store the optimized code in the closure.
  __ movp(FieldOperand(closure, JSFunction::kCodeOffset), optimized_code);
  __ movp(scratch1, optimized_code);  // Write barrier clobbers scratch1 below.
  __ RecordWriteField(closure, JSFunction::kCodeOffset, scratch1, scratch2,
                      kDontSaveFPRegs, OMIT_REMEMBERED_SET, OMIT_SMI_CHECK);
}
static void LeaveInterpreterFrame(MacroAssembler* masm, Register scratch1,
                                  Register scratch2) {
  Register args_count = scratch1;
  Register return_pc = scratch2;

  // Get the arguments + receiver count.
  __ movp(args_count,
          Operand(rbp, InterpreterFrameConstants::kBytecodeArrayFromFp));
  __ movl(args_count,
          FieldOperand(args_count, BytecodeArray::kParameterSizeOffset));

  // Leave the frame (also dropping the register file).
  __ leave();

  // Drop receiver + arguments.
  __ PopReturnAddressTo(return_pc);
  __ addp(rsp, args_count);
  __ PushReturnAddressFrom(return_pc);
}
static void TailCallRuntimeIfMarkerEquals(MacroAssembler* masm,
                                          Register smi_entry,
                                          OptimizationMarker marker,
                                          Runtime::FunctionId function_id) {
  Label no_match;
  __ SmiCompare(smi_entry, Smi::FromEnum(marker));
  __ j(not_equal, &no_match);
  GenerateTailCallToReturnedCode(masm, function_id);
  __ bind(&no_match);
}
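
// The optimized code slot of the feedback vector is either a Smi holding an
// OptimizationMarker or a weak reference to a Code object. A marker tail-calls
// the matching runtime function; a live, non-deoptimized Code object is
// installed in the closure and jumped to; otherwise execution falls through
// to the interpreted bytecode.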
static void MaybeTailCallOptimizedCodeSlot(MacroAssembler* masm,
                                           Register feedback_vector,
                                           Register scratch1, Register scratch2,
                                           Register scratch3) {
  DCHECK(!AreAliased(feedback_vector, rax, rdx, rdi, scratch1, scratch2,
                     scratch3));

  Label optimized_code_slot_is_weak_ref, fallthrough;

  Register closure = rdi;
  Register optimized_code_entry = scratch1;

  __ movp(optimized_code_entry,
          FieldOperand(feedback_vector, FeedbackVector::kOptimizedCodeOffset));

  // Check if the code entry is a Smi. If yes, we interpret it as an
  // optimization marker. Otherwise, interpret it as a weak reference to a
  // code object.
  __ JumpIfNotSmi(optimized_code_entry, &optimized_code_slot_is_weak_ref);

  {
    // Optimized code slot is a Smi optimization marker.

    // Fall through if no optimization trigger.
    __ SmiCompare(optimized_code_entry,
                  Smi::FromEnum(OptimizationMarker::kNone));
    __ j(equal, &fallthrough);

    TailCallRuntimeIfMarkerEquals(masm, optimized_code_entry,
                                  OptimizationMarker::kLogFirstExecution,
                                  Runtime::kFunctionFirstExecution);
    TailCallRuntimeIfMarkerEquals(masm, optimized_code_entry,
                                  OptimizationMarker::kCompileOptimized,
                                  Runtime::kCompileOptimized_NotConcurrent);
    TailCallRuntimeIfMarkerEquals(
        masm, optimized_code_entry,
        OptimizationMarker::kCompileOptimizedConcurrent,
        Runtime::kCompileOptimized_Concurrent);

    {
      // Otherwise, the marker is InOptimizationQueue, so fall through hoping
      // that the next time the marker is processed there will be optimized
      // code.
      if (FLAG_debug_code) {
        __ SmiCompare(optimized_code_entry,
                      Smi::FromEnum(OptimizationMarker::kInOptimizationQueue));
        __ Assert(equal, AbortReason::kExpectedOptimizationSentinel);
      }
      __ jmp(&fallthrough);
    }
  }

  {
    // Optimized code slot is a weak reference.
    __ bind(&optimized_code_slot_is_weak_ref);

    __ LoadWeakValue(optimized_code_entry, &fallthrough);

    // Check if the optimized code is marked for deopt. If it is, call the
    // runtime to clear it.
    Label found_deoptimized_code;
    __ movp(scratch2,
            FieldOperand(optimized_code_entry, Code::kCodeDataContainerOffset));
    __ testl(
        FieldOperand(scratch2, CodeDataContainer::kKindSpecificFlagsOffset),
        Immediate(1 << Code::kMarkedForDeoptimizationBit));
    __ j(not_zero, &found_deoptimized_code);

    // Optimized code is good, get it into the closure and tail call it.
    ReplaceClosureCodeWithOptimizedCode(masm, optimized_code_entry, closure,
                                        scratch2, scratch3, feedback_vector);
    static_assert(kJavaScriptCallCodeStartRegister == rcx, "ABI mismatch");
    __ Move(rcx, optimized_code_entry);
    __ addp(rcx, Immediate(Code::kHeaderSize - kHeapObjectTag));
    __ jmp(rcx);

    // Optimized code slot contains deoptimized code, evict it and re-enter
    // the closure's code.
    __ bind(&found_deoptimized_code);
    GenerateTailCallToReturnedCode(masm, Runtime::kEvictOptimizedCodeSlot);
  }

  // Fall-through if the optimized code cell is clear and there is no
  // optimization marker.
  __ bind(&fallthrough);
}
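
// Advance the current bytecode offset. This simulates what all bytecode
// handlers do upon completion of the underlying operation: Wide and ExtraWide
// prefix bytecodes switch to the scaled bytecode-size tables, and return
// bytecodes jump to the if_return label instead of advancing.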
static void AdvanceBytecodeOffsetOrReturn(MacroAssembler* masm,
                                          Register bytecode_array,
                                          Register bytecode_offset,
                                          Register bytecode, Register scratch1,
                                          Label* if_return) {
  Register bytecode_size_table = scratch1;
  DCHECK(!AreAliased(bytecode_array, bytecode_offset, bytecode_size_table,
                     bytecode));

  __ Move(bytecode_size_table,
          ExternalReference::bytecode_size_table_address());

  // Check if the bytecode is a Wide or ExtraWide prefix bytecode.
  Label process_bytecode, extra_wide;
  STATIC_ASSERT(0 == static_cast<int>(interpreter::Bytecode::kWide));
  STATIC_ASSERT(1 == static_cast<int>(interpreter::Bytecode::kExtraWide));
  STATIC_ASSERT(2 == static_cast<int>(interpreter::Bytecode::kDebugBreakWide));
  STATIC_ASSERT(3 ==
                static_cast<int>(interpreter::Bytecode::kDebugBreakExtraWide));
  __ cmpb(bytecode, Immediate(0x3));
  __ j(above, &process_bytecode, Label::kNear);
  __ testb(bytecode, Immediate(0x1));
  __ j(not_equal, &extra_wide, Label::kNear);

  // Load the next bytecode and update table to the wide scaled table.
  __ incl(bytecode_offset);
  __ movzxbp(bytecode, Operand(bytecode_array, bytecode_offset, times_1, 0));
  __ addp(bytecode_size_table,
          Immediate(kIntSize * interpreter::Bytecodes::kBytecodeCount));
  __ jmp(&process_bytecode, Label::kNear);

  __ bind(&extra_wide);
  // Load the next bytecode and update table to the extra wide scaled table.
  __ incl(bytecode_offset);
  __ movzxbp(bytecode, Operand(bytecode_array, bytecode_offset, times_1, 0));
  __ addp(bytecode_size_table,
          Immediate(2 * kIntSize * interpreter::Bytecodes::kBytecodeCount));

  __ bind(&process_bytecode);

// Bail out to the return label if this is a return bytecode.
#define JUMP_IF_EQUAL(NAME)                                             \
  __ cmpb(bytecode,                                                     \
          Immediate(static_cast<int>(interpreter::Bytecode::k##NAME))); \
  __ j(equal, if_return, Label::kFar);
  RETURN_BYTECODE_LIST(JUMP_IF_EQUAL)
#undef JUMP_IF_EQUAL

  // Otherwise, load the size of the current bytecode and advance the offset.
  __ addl(bytecode_offset, Operand(bytecode_size_table, bytecode, times_4, 0));
}
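
// Generate code for entering a JS function with the interpreter. On entry,
// rdi holds the target JSFunction and rdx the incoming new target; the
// trampoline builds an interpreter frame (bytecode array, bytecode offset and
// register file) and dispatches to the handler of the first bytecode.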
void Builtins::Generate_InterpreterEntryTrampoline(MacroAssembler* masm) {
  Register closure = rdi;
  Register feedback_vector = rbx;

  // Load the feedback vector from the closure.
  __ movp(feedback_vector,
          FieldOperand(closure, JSFunction::kFeedbackCellOffset));
  __ movp(feedback_vector, FieldOperand(feedback_vector, Cell::kValueOffset));

  Label push_stack_frame;
  // Check if feedback vector is valid. If valid, check for optimized code
  // and update invocation count. Otherwise, set up the stack frame.
  __ JumpIfRoot(feedback_vector, RootIndex::kUndefinedValue, &push_stack_frame);

  // Read off the optimized code slot in the feedback vector, and if there
  // is optimized code or an optimization marker, call that instead.
  MaybeTailCallOptimizedCodeSlot(masm, feedback_vector, rcx, r14, r15);

  // Increment invocation count for the function.
  __ incl(
      FieldOperand(feedback_vector, FeedbackVector::kInvocationCountOffset));

  // Open a frame scope to indicate that there is a frame on the stack. The
  // MANUAL indicates that the scope shouldn't actually generate code to set
  // up the frame (that is done below).
  __ bind(&push_stack_frame);
  FrameScope frame_scope(masm, StackFrame::MANUAL);
  __ pushq(rbp);  // Caller's frame pointer.
  __ movp(rbp, rsp);
  __ Push(rsi);  // Callee's context.
  __ Push(rdi);  // Callee's JS function.

  // Get the bytecode array from the function object and load it into
  // kInterpreterBytecodeArrayRegister.
  __ movp(rax, FieldOperand(closure, JSFunction::kSharedFunctionInfoOffset));
  __ movp(kInterpreterBytecodeArrayRegister,
          FieldOperand(rax, SharedFunctionInfo::kFunctionDataOffset));
  GetSharedFunctionInfoBytecode(masm, kInterpreterBytecodeArrayRegister,
                                kScratchRegister);

  // Check that the function data field is actually a BytecodeArray object.
  if (FLAG_debug_code) {
    __ AssertNotSmi(kInterpreterBytecodeArrayRegister);
    __ CmpObjectType(kInterpreterBytecodeArrayRegister, BYTECODE_ARRAY_TYPE,
                     rcx);
    __ Assert(
        equal,
        AbortReason::kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry);
  }

  // Reset the bytecode age of the bytecode array.
  __ movb(FieldOperand(kInterpreterBytecodeArrayRegister,
                       BytecodeArray::kBytecodeAgeOffset),
          Immediate(BytecodeArray::kNoAgeBytecodeAge));

  // Load initial bytecode offset.
  __ movp(kInterpreterBytecodeOffsetRegister,
          Immediate(BytecodeArray::kHeaderSize - kHeapObjectTag));

  // Push bytecode array and Smi tagged bytecode offset.
  __ Push(kInterpreterBytecodeArrayRegister);
  __ SmiTag(rcx, kInterpreterBytecodeOffsetRegister);
  __ Push(rcx);

  // Allocate the local and temporary register file on the stack.
  {
    // Load frame size from the BytecodeArray object.
    __ movl(rcx, FieldOperand(kInterpreterBytecodeArrayRegister,
                              BytecodeArray::kFrameSizeOffset));

    // Do a stack check to ensure we don't go over the limit.
    Label ok;
    __ movp(rax, rsp);
    __ subp(rax, rcx);
    __ CompareRoot(rax, RootIndex::kRealStackLimit);
    __ j(above_equal, &ok, Label::kNear);
    __ CallRuntime(Runtime::kThrowStackOverflow);
    __ bind(&ok);

    // If ok, push undefined as the initial value for all register file
    // entries.
    Label loop_header;
    Label loop_check;
    __ LoadRoot(rax, RootIndex::kUndefinedValue);
    __ j(always, &loop_check, Label::kNear);
    __ bind(&loop_header);
    __ Push(rax);
    // Continue loop if not done.
    __ bind(&loop_check);
    __ subp(rcx, Immediate(kPointerSize));
    __ j(greater_equal, &loop_header, Label::kNear);
  }

  // If the bytecode array has a valid incoming new target or generator object
  // register, initialize it directly with the incoming value.
  Label no_incoming_new_target_or_generator_register;
  __ movsxlq(
      rax,
      FieldOperand(kInterpreterBytecodeArrayRegister,
                   BytecodeArray::kIncomingNewTargetOrGeneratorRegisterOffset));
  __ testl(rax, rax);
  __ j(zero, &no_incoming_new_target_or_generator_register, Label::kNear);
  __ movp(Operand(rbp, rax, times_pointer_size, 0), rdx);
  __ bind(&no_incoming_new_target_or_generator_register);

  // Load accumulator with undefined.
  __ LoadRoot(kInterpreterAccumulatorRegister, RootIndex::kUndefinedValue);

  // Load the dispatch table into a register and dispatch to the bytecode
  // handler at the current bytecode offset.
  Label do_dispatch;
  __ bind(&do_dispatch);
  __ Move(
      kInterpreterDispatchTableRegister,
      ExternalReference::interpreter_dispatch_table_address(masm->isolate()));
  __ movzxbp(r11, Operand(kInterpreterBytecodeArrayRegister,
                          kInterpreterBytecodeOffsetRegister, times_1, 0));
  __ movp(
      kJavaScriptCallCodeStartRegister,
      Operand(kInterpreterDispatchTableRegister, r11, times_pointer_size, 0));
  __ call(kJavaScriptCallCodeStartRegister);
  masm->isolate()->heap()->SetInterpreterEntryReturnPCOffset(masm->pc_offset());

  // Get bytecode array and bytecode offset from the stack frame.
  __ movp(kInterpreterBytecodeArrayRegister,
          Operand(rbp, InterpreterFrameConstants::kBytecodeArrayFromFp));
  __ movp(kInterpreterBytecodeOffsetRegister,
          Operand(rbp, InterpreterFrameConstants::kBytecodeOffsetFromFp));
  __ SmiUntag(kInterpreterBytecodeOffsetRegister,
              kInterpreterBytecodeOffsetRegister);

  // Either return, or advance to the next bytecode and dispatch.
  Label do_return;
  __ movzxbp(rbx, Operand(kInterpreterBytecodeArrayRegister,
                          kInterpreterBytecodeOffsetRegister, times_1, 0));
  AdvanceBytecodeOffsetOrReturn(masm, kInterpreterBytecodeArrayRegister,
                                kInterpreterBytecodeOffsetRegister, rbx, rcx,
                                &do_return);
  __ jmp(&do_dispatch);

  __ bind(&do_return);
  // The return value is in rax.
  LeaveInterpreterFrame(masm, rbx, rcx);
  __ ret(0);
}
static void Generate_InterpreterPushArgs(MacroAssembler* masm,
                                         Register num_args,
                                         Register start_address,
                                         Register scratch) {
  // Find the address of the last argument.
  __ Move(scratch, num_args);
  __ shlp(scratch, Immediate(kPointerSizeLog2));
  __ negp(scratch);
  __ addp(scratch, start_address);

  // Push the arguments.
  Label loop_header, loop_check;
  __ j(always, &loop_check, Label::kNear);
  __ bind(&loop_header);
  __ Push(Operand(start_address, 0));
  __ subp(start_address, Immediate(kPointerSize));
  __ bind(&loop_check);
  __ cmpp(start_address, scratch);
  __ j(greater, &loop_header, Label::kNear);
}
void Builtins::Generate_InterpreterPushArgsThenCallImpl(
    MacroAssembler* masm, ConvertReceiverMode receiver_mode,
    InterpreterPushArgsMode mode) {
  DCHECK(mode != InterpreterPushArgsMode::kArrayFunction);
  Label stack_overflow;

  // Number of values to be pushed.
  __ leal(rcx, Operand(rax, 1));

  // Add a stack check before pushing arguments.
  Generate_StackOverflowCheck(masm, rcx, rdx, &stack_overflow);

  // Pop return address to allow tail-call after pushing arguments.
  __ PopReturnAddressTo(kScratchRegister);

  // Push "undefined" as the receiver arg if we need to.
  if (receiver_mode == ConvertReceiverMode::kNullOrUndefined) {
    __ PushRoot(RootIndex::kUndefinedValue);
    __ decl(rcx);  // Subtract one for receiver.
  }

  // rbx and rdx will be modified.
  Generate_InterpreterPushArgs(masm, rcx, rbx, rdx);

  if (mode == InterpreterPushArgsMode::kWithFinalSpread) {
    __ Pop(rbx);   // Pass the spread in a register.
    __ decl(rax);  // Subtract one for spread.
  }

  // Call the target.
  __ PushReturnAddressFrom(kScratchRegister);  // Re-push return address.

  if (mode == InterpreterPushArgsMode::kWithFinalSpread) {
    __ Jump(BUILTIN_CODE(masm->isolate(), CallWithSpread),
            RelocInfo::CODE_TARGET);
  } else {
    __ Jump(masm->isolate()->builtins()->Call(receiver_mode),
            RelocInfo::CODE_TARGET);
  }

  // Throw stack overflow exception.
  __ bind(&stack_overflow);
  {
    __ TailCallRuntime(Runtime::kThrowStackOverflow);
    __ int3();  // This should be unreachable.
  }
}
void Builtins::Generate_InterpreterPushArgsThenConstructImpl(
    MacroAssembler* masm, InterpreterPushArgsMode mode) {
  Label stack_overflow;

  // Add a stack check before pushing arguments.
  Generate_StackOverflowCheck(masm, rax, r8, &stack_overflow);

  // Pop return address to allow tail-call after pushing arguments.
  __ PopReturnAddressTo(kScratchRegister);

  // Push slot for the receiver to be constructed.
  __ Push(Immediate(0));

  // rcx and r8 will be modified.
  Generate_InterpreterPushArgs(masm, rax, rcx, r8);

  if (mode == InterpreterPushArgsMode::kWithFinalSpread) {
    __ Pop(rbx);   // Pass the spread in a register.
    __ decl(rax);  // Subtract one for spread.

    // Push return address in preparation for the tail-call.
    __ PushReturnAddressFrom(kScratchRegister);
  } else {
    __ PushReturnAddressFrom(kScratchRegister);
    __ AssertUndefinedOrAllocationSite(rbx);
  }

  if (mode == InterpreterPushArgsMode::kArrayFunction) {
    // Tail call to the array construct stub (still in the caller context at
    // this point).
    __ AssertFunction(rdi);
    // Jump to the constructor function (rax, rbx, rdx passed on).
    Handle<Code> code = BUILTIN_CODE(masm->isolate(), ArrayConstructorImpl);
    __ Jump(code, RelocInfo::CODE_TARGET);
  } else if (mode == InterpreterPushArgsMode::kWithFinalSpread) {
    // Call the constructor (rax, rdx, rdi passed on).
    __ Jump(BUILTIN_CODE(masm->isolate(), ConstructWithSpread),
            RelocInfo::CODE_TARGET);
  } else {
    DCHECK_EQ(InterpreterPushArgsMode::kOther, mode);
    // Call the constructor (rax, rdx, rdi passed on).
    __ Jump(BUILTIN_CODE(masm->isolate(), Construct), RelocInfo::CODE_TARGET);
  }

  // Throw stack overflow exception.
  __ bind(&stack_overflow);
  {
    __ TailCallRuntime(Runtime::kThrowStackOverflow);
    __ int3();  // This should be unreachable.
  }
}
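
// Re-enters bytecode dispatch at the bytecode offset stored in the current
// interpreter frame, returning through either the builtin
// InterpreterEntryTrampoline or the per-function trampoline stored in the
// function's InterpreterData.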
static void Generate_InterpreterEnterBytecode(MacroAssembler* masm) {
  // Set the return address to the correct point in the interpreter entry
  // trampoline.
  Label builtin_trampoline, trampoline_loaded;
  Smi interpreter_entry_return_pc_offset(
      masm->isolate()->heap()->interpreter_entry_return_pc_offset());
  DCHECK_NE(interpreter_entry_return_pc_offset, Smi::kZero);

  // If the SFI function_data is an InterpreterData, get the trampoline stored
  // in it, otherwise get the trampoline from the builtins list.
  __ movp(rbx, Operand(rbp, StandardFrameConstants::kFunctionOffset));
  __ movp(rbx, FieldOperand(rbx, JSFunction::kSharedFunctionInfoOffset));
  __ movp(rbx, FieldOperand(rbx, SharedFunctionInfo::kFunctionDataOffset));
  __ CmpObjectType(rbx, INTERPRETER_DATA_TYPE, kScratchRegister);
  __ j(not_equal, &builtin_trampoline, Label::kNear);

  __ movp(rbx,
          FieldOperand(rbx, InterpreterData::kInterpreterTrampolineOffset));
  __ addp(rbx, Immediate(Code::kHeaderSize - kHeapObjectTag));
  __ jmp(&trampoline_loaded, Label::kNear);

  __ bind(&builtin_trampoline);
  __ movp(rbx,
          __ ExternalReferenceAsOperand(
              ExternalReference::
                  address_of_interpreter_entry_trampoline_instruction_start(
                      masm->isolate()),
              kScratchRegister));

  __ bind(&trampoline_loaded);
  __ addp(rbx, Immediate(interpreter_entry_return_pc_offset->value()));
  __ Push(rbx);

  // Initialize dispatch table register.
  __ Move(
      kInterpreterDispatchTableRegister,
      ExternalReference::interpreter_dispatch_table_address(masm->isolate()));

  // Get the bytecode array pointer from the frame.
  __ movp(kInterpreterBytecodeArrayRegister,
          Operand(rbp, InterpreterFrameConstants::kBytecodeArrayFromFp));

  if (FLAG_debug_code) {
    // Check that the function data field is actually a BytecodeArray object.
    __ AssertNotSmi(kInterpreterBytecodeArrayRegister);
    __ CmpObjectType(kInterpreterBytecodeArrayRegister, BYTECODE_ARRAY_TYPE,
                     rbx);
    __ Assert(
        equal,
        AbortReason::kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry);
  }

  // Get the target bytecode offset from the frame.
  __ movp(kInterpreterBytecodeOffsetRegister,
          Operand(rbp, InterpreterFrameConstants::kBytecodeOffsetFromFp));
  __ SmiUntag(kInterpreterBytecodeOffsetRegister,
              kInterpreterBytecodeOffsetRegister);

  // Dispatch to the target bytecode.
  __ movzxbp(r11, Operand(kInterpreterBytecodeArrayRegister,
                          kInterpreterBytecodeOffsetRegister, times_1, 0));
  __ movp(
      kJavaScriptCallCodeStartRegister,
      Operand(kInterpreterDispatchTableRegister, r11, times_pointer_size, 0));
  __ jmp(kJavaScriptCallCodeStartRegister);
}
void Builtins::Generate_InterpreterEnterBytecodeAdvance(MacroAssembler* masm) {
  // Get bytecode array and bytecode offset from the stack frame.
  __ movp(kInterpreterBytecodeArrayRegister,
          Operand(rbp, InterpreterFrameConstants::kBytecodeArrayFromFp));
  __ movp(kInterpreterBytecodeOffsetRegister,
          Operand(rbp, InterpreterFrameConstants::kBytecodeOffsetFromFp));
  __ SmiUntag(kInterpreterBytecodeOffsetRegister,
              kInterpreterBytecodeOffsetRegister);

  // Load the current bytecode.
  __ movzxbp(rbx, Operand(kInterpreterBytecodeArrayRegister,
                          kInterpreterBytecodeOffsetRegister, times_1, 0));

  // Advance to the next bytecode.
  Label if_return;
  AdvanceBytecodeOffsetOrReturn(masm, kInterpreterBytecodeArrayRegister,
                                kInterpreterBytecodeOffsetRegister, rbx, rcx,
                                &if_return);

  // Convert the new bytecode offset to a Smi and save it in the stack frame.
  __ SmiTag(rbx, kInterpreterBytecodeOffsetRegister);
  __ movp(Operand(rbp, InterpreterFrameConstants::kBytecodeOffsetFromFp), rbx);

  Generate_InterpreterEnterBytecode(masm);

  // We should never take the if_return path.
  __ bind(&if_return);
  __ Abort(AbortReason::kInvalidBytecodeAdvance);
}
void Builtins::Generate_InterpreterEnterBytecodeDispatch(MacroAssembler* masm) {
  Generate_InterpreterEnterBytecode(masm);
}
void Builtins::Generate_InstantiateAsmJs(MacroAssembler* masm) {
  Label failed;
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    // Preserve argument count for later comparison.
    __ movp(rcx, rax);
    // Push the number of arguments to the callee.
    __ SmiTag(rax, rax);
    __ Push(rax);
    // Push a copy of the target function and the new target.
    __ Push(rdi);
    __ Push(rdx);

    // The function.
    __ Push(rdi);
    // Copy arguments from caller (stdlib, foreign, heap).
    Label args_done;
    for (int j = 0; j < 4; ++j) {
      Label over;
      if (j < 3) {
        __ cmpp(rcx, Immediate(j));
        __ j(not_equal, &over, Label::kNear);
      }
      for (int i = j - 1; i >= 0; --i) {
        __ Push(Operand(rbp, StandardFrameConstants::kCallerSPOffset +
                                 i * kPointerSize));
      }
      for (int i = 0; i < 3 - j; ++i) {
        __ PushRoot(RootIndex::kUndefinedValue);
      }
      if (j < 3) {
        __ jmp(&args_done, Label::kNear);
        __ bind(&over);
      }
    }
    __ bind(&args_done);

    // Call runtime; on success unwind frame and parent frame.
    __ CallRuntime(Runtime::kInstantiateAsmJs, 4);
    // A smi 0 is returned on failure, an object on success.
    __ JumpIfSmi(rax, &failed, Label::kNear);

    __ Drop(2);
    __ Pop(rcx);
    __ SmiUntag(rcx, rcx);
    scope.GenerateLeaveFrame();

    __ PopReturnAddressTo(rbx);
    __ incp(rcx);
    __ leap(rsp, Operand(rsp, rcx, times_pointer_size, 0));
    __ PushReturnAddressFrom(rbx);
    __ ret(0);

    __ bind(&failed);
    // Restore target function and new target.
    __ Pop(rdx);
    __ Pop(rdi);
    __ Pop(rax);
    __ SmiUntag(rax, rax);
  }
  // On failure, tail call back to regular JS by re-calling the function
  // which has been reset to the compile-lazy builtin.
  static_assert(kJavaScriptCallCodeStartRegister == rcx, "ABI mismatch");
  __ movp(rcx, FieldOperand(rdi, JSFunction::kCodeOffset));
  __ addp(rcx, Immediate(Code::kHeaderSize - kHeapObjectTag));
  __ jmp(rcx);
}
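
// Restores the register state captured by the deoptimizer at a builtin
// continuation site and resumes in the requested builtin. With with_result,
// rax is first written into the spilled slot reserved for the LAZY deopt
// point before the allocatable registers are popped back.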
void Generate_ContinueToBuiltinHelper(MacroAssembler* masm,
                                      bool java_script_builtin,
                                      bool with_result) {
  const RegisterConfiguration* config(RegisterConfiguration::Default());
  int allocatable_register_count = config->num_allocatable_general_registers();
  if (with_result) {
    // Overwrite the hole inserted by the deoptimizer with the return value
    // from the LAZY deopt point.
    __ movq(Operand(rsp,
                    config->num_allocatable_general_registers() * kPointerSize +
                        BuiltinContinuationFrameConstants::kFixedFrameSize),
            rax);
  }
  for (int i = allocatable_register_count - 1; i >= 0; --i) {
    int code = config->GetAllocatableGeneralCode(i);
    __ popq(Register::from_code(code));
    if (java_script_builtin && code == kJavaScriptCallArgCountRegister.code()) {
      __ SmiUntag(Register::from_code(code), Register::from_code(code));
    }
  }
  __ movq(
      rbp,
      Operand(rsp, BuiltinContinuationFrameConstants::kFixedFrameSizeFromFp));
  const int offsetToPC =
      BuiltinContinuationFrameConstants::kFixedFrameSizeFromFp - kPointerSize;
  __ popq(Operand(rsp, offsetToPC));
  __ Drop(offsetToPC / kPointerSize);
  __ addq(Operand(rsp, 0), Immediate(Code::kHeaderSize - kHeapObjectTag));
  __ Ret();
}
void Builtins::Generate_ContinueToCodeStubBuiltin(MacroAssembler* masm) {
  Generate_ContinueToBuiltinHelper(masm, false, false);
}

void Builtins::Generate_ContinueToCodeStubBuiltinWithResult(
    MacroAssembler* masm) {
  Generate_ContinueToBuiltinHelper(masm, false, true);
}

void Builtins::Generate_ContinueToJavaScriptBuiltin(MacroAssembler* masm) {
  Generate_ContinueToBuiltinHelper(masm, true, false);
}

void Builtins::Generate_ContinueToJavaScriptBuiltinWithResult(
    MacroAssembler* masm) {
  Generate_ContinueToBuiltinHelper(masm, true, true);
}
void Builtins::Generate_NotifyDeoptimized(MacroAssembler* masm) {
  // Enter an internal frame.
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ CallRuntime(Runtime::kNotifyDeoptimized);
    // Tear down internal frame.
  }

  DCHECK_EQ(kInterpreterAccumulatorRegister.code(), rax.code());
  __ movp(rax, Operand(rsp, kPCOnStackSize));
  __ ret(1 * kPointerSize);  // Remove rax.
}
void Builtins::Generate_FunctionPrototypeApply(MacroAssembler* masm) {
  // Load receiver into rdi, argArray into rbx (if present), remove all
  // arguments from the stack (including the receiver), and push thisArg (if
  // present) instead.
  {
    Label no_arg_array, no_this_arg;
    StackArgumentsAccessor args(rsp, rax);
    __ LoadRoot(rdx, RootIndex::kUndefinedValue);
    __ movp(rbx, rdx);
    __ movp(rdi, args.GetReceiverOperand());
    __ testp(rax, rax);
    __ j(zero, &no_this_arg, Label::kNear);
    {
      __ movp(rdx, args.GetArgumentOperand(1));
      __ cmpp(rax, Immediate(1));
      __ j(equal, &no_arg_array, Label::kNear);
      __ movp(rbx, args.GetArgumentOperand(2));
      __ bind(&no_arg_array);
    }
    __ bind(&no_this_arg);
    __ PopReturnAddressTo(rcx);
    __ leap(rsp, Operand(rsp, rax, times_pointer_size, kPointerSize));
    __ Push(rdx);
    __ PushReturnAddressFrom(rcx);
  }

  // Tail call with no arguments if argArray is null or undefined.
  Label no_arguments;
  __ JumpIfRoot(rbx, RootIndex::kNullValue, &no_arguments, Label::kNear);
  __ JumpIfRoot(rbx, RootIndex::kUndefinedValue, &no_arguments, Label::kNear);

  // Apply the receiver to the given argArray.
  __ Jump(BUILTIN_CODE(masm->isolate(), CallWithArrayLike),
          RelocInfo::CODE_TARGET);

  // The argArray is either null or undefined, so we tail call without any
  // arguments to the receiver.
  __ bind(&no_arguments);
  {
    __ Set(rax, 0);
    __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
  }
}
void Builtins::Generate_FunctionPrototypeCall(MacroAssembler* masm) {
  // Make sure we have at least one argument.
  {
    Label done;
    __ testp(rax, rax);
    __ j(not_zero, &done, Label::kNear);
    __ PopReturnAddressTo(rbx);
    __ PushRoot(RootIndex::kUndefinedValue);
    __ PushReturnAddressFrom(rbx);
    __ incp(rax);
    __ bind(&done);
  }

  // Get the callable to call (passed as receiver) from the stack.
  {
    StackArgumentsAccessor args(rsp, rax);
    __ movp(rdi, args.GetReceiverOperand());
  }

  // Shift arguments and return address one slot down on the stack
  // (overwriting the original receiver) and adjust the argument count.
  {
    Label loop;
    __ movp(rcx, rax);
    StackArgumentsAccessor args(rsp, rcx);
    __ bind(&loop);
    __ movp(rbx, args.GetArgumentOperand(1));
    __ movp(args.GetArgumentOperand(0), rbx);
    __ decp(rcx);
    __ j(not_zero, &loop);
    __ DropUnderReturnAddress(1, rbx);
    __ decp(rax);  // One fewer argument (first argument is new receiver).
  }

  // Call the callable.
  __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
}
void Builtins::Generate_ReflectApply(MacroAssembler* masm) {
  // Load target into rdi (if present), thisArgument into rdx (if present),
  // and argumentsList into rbx (if present); then remove all arguments from
  // the stack (including the receiver) and push thisArgument instead.
  {
    Label done;
    StackArgumentsAccessor args(rsp, rax);
    __ LoadRoot(rdi, RootIndex::kUndefinedValue);
    __ movp(rdx, rdi);
    __ movp(rbx, rdi);
    __ cmpp(rax, Immediate(1));
    __ j(below, &done, Label::kNear);
    __ movp(rdi, args.GetArgumentOperand(1));  // target
    __ j(equal, &done, Label::kNear);
    __ movp(rdx, args.GetArgumentOperand(2));  // thisArgument
    __ cmpp(rax, Immediate(3));
    __ j(below, &done, Label::kNear);
    __ movp(rbx, args.GetArgumentOperand(3));  // argumentsList
    __ bind(&done);
    __ PopReturnAddressTo(rcx);
    __ leap(rsp, Operand(rsp, rax, times_pointer_size, kPointerSize));
    __ Push(rdx);
    __ PushReturnAddressFrom(rcx);
  }

  // Apply the target to the given argumentsList.
  __ Jump(BUILTIN_CODE(masm->isolate(), CallWithArrayLike),
          RelocInfo::CODE_TARGET);
}
void Builtins::Generate_ReflectConstruct(MacroAssembler* masm) {
  // Load target into rdi (if present), argumentsList into rbx (if present),
  // and new.target into rdx (if present, defaulting to target); then remove
  // all arguments from the stack (including the receiver) and push undefined
  // as the receiver instead.
  {
    Label done;
    StackArgumentsAccessor args(rsp, rax);
    __ LoadRoot(rdi, RootIndex::kUndefinedValue);
    __ movp(rdx, rdi);
    __ movp(rbx, rdi);
    __ cmpp(rax, Immediate(1));
    __ j(below, &done, Label::kNear);
    __ movp(rdi, args.GetArgumentOperand(1));  // target
    __ movp(rdx, rdi);  // new.target defaults to target
    __ j(equal, &done, Label::kNear);
    __ movp(rbx, args.GetArgumentOperand(2));  // argumentsList
    __ cmpp(rax, Immediate(3));
    __ j(below, &done, Label::kNear);
    __ movp(rdx, args.GetArgumentOperand(3));  // new.target
    __ bind(&done);
    __ PopReturnAddressTo(rcx);
    __ leap(rsp, Operand(rsp, rax, times_pointer_size, kPointerSize));
    __ PushRoot(RootIndex::kUndefinedValue);
    __ PushReturnAddressFrom(rcx);
  }

  // Construct the target with the given new.target and argumentsList.
  __ Jump(BUILTIN_CODE(masm->isolate(), ConstructWithArrayLike),
          RelocInfo::CODE_TARGET);
}
void Builtins::Generate_InternalArrayConstructor(MacroAssembler* masm) {
  Label generic_array_code;

  if (FLAG_debug_code) {
    // Initial map for the builtin InternalArray functions should be maps.
    __ movp(rbx, FieldOperand(rdi, JSFunction::kPrototypeOrInitialMapOffset));
    // Will both indicate a nullptr and a Smi.
    STATIC_ASSERT(kSmiTag == 0);
    Condition not_smi = NegateCondition(masm->CheckSmi(rbx));
    __ Check(not_smi,
             AbortReason::kUnexpectedInitialMapForInternalArrayFunction);
    __ CmpObjectType(rbx, MAP_TYPE, rcx);
    __ Check(equal, AbortReason::kUnexpectedInitialMapForInternalArrayFunction);
  }

  // Run the native code for the InternalArray function called as a normal
  // function.
  __ Jump(BUILTIN_CODE(masm->isolate(), InternalArrayConstructorImpl),
          RelocInfo::CODE_TARGET);
}
static void EnterArgumentsAdaptorFrame(MacroAssembler* masm) {
  __ pushq(rbp);
  __ movp(rbp, rsp);

  // Store the arguments adaptor context sentinel.
  __ Push(Immediate(StackFrame::TypeToMarker(StackFrame::ARGUMENTS_ADAPTOR)));

  // Push the function on the stack.
  __ Push(rdi);

  // Preserve the number of arguments on the stack.
  __ SmiTag(r8, rax);
  __ Push(r8);

  __ Push(Immediate(0));  // Padding.
}

static void LeaveArgumentsAdaptorFrame(MacroAssembler* masm) {
  // Retrieve the number of arguments from the stack. Number is a Smi.
  __ movp(rbx, Operand(rbp, ArgumentsAdaptorFrameConstants::kLengthOffset));

  // Leave the frame.
  __ movp(rsp, rbp);
  __ popq(rbp);

  // Remove caller arguments from the stack.
  __ PopReturnAddressTo(rcx);
  SmiIndex index = masm->SmiToIndex(rbx, rbx, kPointerSizeLog2);
  __ leap(rsp, Operand(rsp, index.reg, index.scale, 1 * kPointerSize));
  __ PushReturnAddressFrom(rcx);
}
void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
  // rax : actual number of arguments, rbx : expected number of arguments,
  // rdx : new target, rdi : function (all passed through to the callee).
  Label invoke, dont_adapt_arguments, stack_overflow, enough, too_few;
  __ cmpp(rbx, Immediate(SharedFunctionInfo::kDontAdaptArgumentsSentinel));
  __ j(equal, &dont_adapt_arguments);
  __ cmpp(rax, rbx);
  __ j(less, &too_few);

  {  // Enough parameters: Actual >= expected.
    __ bind(&enough);
    EnterArgumentsAdaptorFrame(masm);
    Generate_StackOverflowCheck(masm, rbx, rcx, &stack_overflow);

    // Copy receiver and all expected arguments.
    const int offset = StandardFrameConstants::kCallerSPOffset;
    __ leap(rax, Operand(rbp, rax, times_pointer_size, offset));
    __ Set(r8, -1);  // account for receiver

    Label copy;
    __ bind(&copy);
    __ incp(r8);
    __ Push(Operand(rax, 0));
    __ subp(rax, Immediate(kPointerSize));
    __ cmpp(r8, rbx);
    __ j(less, &copy);
    __ jmp(&invoke);
  }

  {  // Too few parameters: Actual < expected.
    __ bind(&too_few);
    EnterArgumentsAdaptorFrame(masm);
    Generate_StackOverflowCheck(masm, rbx, rcx, &stack_overflow);

    // Copy receiver and all actual arguments.
    const int offset = StandardFrameConstants::kCallerSPOffset;
    __ leap(rdi, Operand(rbp, rax, times_pointer_size, offset));
    __ Set(r8, -1);  // account for receiver

    Label copy;
    __ bind(&copy);
    __ incp(r8);
    __ Push(Operand(rdi, 0));
    __ subp(rdi, Immediate(kPointerSize));
    __ cmpp(r8, rax);
    __ j(less, &copy);

    // Fill remaining expected arguments with undefined values.
    Label fill;
    __ LoadRoot(kScratchRegister, RootIndex::kUndefinedValue);
    __ bind(&fill);
    __ incp(r8);
    __ Push(kScratchRegister);
    __ cmpp(r8, rbx);
    __ j(less, &fill);

    // Restore function pointer.
    __ movp(rdi, Operand(rbp, ArgumentsAdaptorFrameConstants::kFunctionOffset));
  }

  // Call the entry point.
  __ bind(&invoke);
  __ movp(rax, rbx);
  static_assert(kJavaScriptCallCodeStartRegister == rcx, "ABI mismatch");
  __ movp(rcx, FieldOperand(rdi, JSFunction::kCodeOffset));
  __ addp(rcx, Immediate(Code::kHeaderSize - kHeapObjectTag));
  __ call(rcx);

  // Store offset of return address for deoptimizer.
  masm->isolate()->heap()->SetArgumentsAdaptorDeoptPCOffset(masm->pc_offset());

  // Leave frame and return.
  LeaveArgumentsAdaptorFrame(masm);
  __ ret(0);

  // Don't adapt arguments.
  __ bind(&dont_adapt_arguments);
  static_assert(kJavaScriptCallCodeStartRegister == rcx, "ABI mismatch");
  __ movp(rcx, FieldOperand(rdi, JSFunction::kCodeOffset));
  __ addp(rcx, Immediate(Code::kHeaderSize - kHeapObjectTag));
  __ jmp(rcx);

  __ bind(&stack_overflow);
  {
    FrameScope frame(masm, StackFrame::MANUAL);
    __ CallRuntime(Runtime::kThrowStackOverflow);
    __ int3();
  }
}
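
// Tail-calls the given Call or Construct builtin (the code parameter) after
// pushing the elements of the FixedArray arguments list in rbx (length rcx)
// onto the stack, converting holes to undefined along the way.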
void Builtins::Generate_CallOrConstructVarargs(MacroAssembler* masm,
                                               Handle<Code> code) {
  if (masm->emit_debug_code()) {
    // Allow rbx to be a FixedArray, or a FixedDoubleArray if rcx == 0.
    Label ok, fail;
    __ AssertNotSmi(rbx);
    Register map = r9;
    __ movp(map, FieldOperand(rbx, HeapObject::kMapOffset));
    __ CmpInstanceType(map, FIXED_ARRAY_TYPE);
    __ j(equal, &ok);
    __ CmpInstanceType(map, FIXED_DOUBLE_ARRAY_TYPE);
    __ j(not_equal, &fail);
    __ cmpl(rcx, Immediate(0));
    __ j(equal, &ok);
    // Fall through.
    __ bind(&fail);
    __ Abort(AbortReason::kOperandIsNotAFixedArray);

    __ bind(&ok);
  }

  // Check for stack overflow.
  Label stack_overflow;
  Generate_StackOverflowCheck(masm, rcx, r8, &stack_overflow, Label::kNear);

  // Push additional arguments onto the stack.
  {
    __ PopReturnAddressTo(r8);
    __ Set(r9, 0);
    Label done, push, loop;
    __ bind(&loop);
    __ cmpl(r9, rcx);
    __ j(equal, &done, Label::kNear);
    // Turn the hole into undefined as we go.
    __ movp(r11, FieldOperand(rbx, r9, times_pointer_size,
                              FixedArray::kHeaderSize));
    __ CompareRoot(r11, RootIndex::kTheHoleValue);
    __ j(not_equal, &push, Label::kNear);
    __ LoadRoot(r11, RootIndex::kUndefinedValue);
    __ bind(&push);
    __ Push(r11);
    __ incl(r9);
    __ jmp(&loop);
    __ bind(&done);
    __ PushReturnAddressFrom(r8);
    __ addq(rax, r9);
  }

  // Tail-call to the actual Call or Construct builtin.
  __ Jump(code, RelocInfo::CODE_TARGET);

  __ bind(&stack_overflow);
  __ TailCallRuntime(Runtime::kThrowStackOverflow);
}
void Builtins::Generate_CallOrConstructForwardVarargs(MacroAssembler* masm,
                                                      CallOrConstructMode mode,
                                                      Handle<Code> code) {
  // Check if new.target has a [[Construct]] internal method.
  if (mode == CallOrConstructMode::kConstruct) {
    Label new_target_constructor, new_target_not_constructor;
    __ JumpIfSmi(rdx, &new_target_not_constructor, Label::kNear);
    __ movp(rbx, FieldOperand(rdx, HeapObject::kMapOffset));
    __ testb(FieldOperand(rbx, Map::kBitFieldOffset),
             Immediate(Map::IsConstructorBit::kMask));
    __ j(not_zero, &new_target_constructor, Label::kNear);
    __ bind(&new_target_not_constructor);
    {
      FrameScope scope(masm, StackFrame::MANUAL);
      __ EnterFrame(StackFrame::INTERNAL);
      __ Push(rdx);
      __ CallRuntime(Runtime::kThrowNotConstructor);
    }
    __ bind(&new_target_constructor);
  }

  // Check if we have an arguments adaptor frame below the function frame.
  Label arguments_adaptor, arguments_done;
  __ movp(rbx, Operand(rbp, StandardFrameConstants::kCallerFPOffset));
  __ cmpp(Operand(rbx, CommonFrameConstants::kContextOrFrameTypeOffset),
          Immediate(StackFrame::TypeToMarker(StackFrame::ARGUMENTS_ADAPTOR)));
  __ j(equal, &arguments_adaptor, Label::kNear);
  {
    __ movp(r8, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
    __ movp(r8, FieldOperand(r8, JSFunction::kSharedFunctionInfoOffset));
    __ movzxwq(
        r8, FieldOperand(r8, SharedFunctionInfo::kFormalParameterCountOffset));
    __ movp(rbx, rbp);
  }
  __ jmp(&arguments_done, Label::kNear);
  __ bind(&arguments_adaptor);
  {
    __ SmiUntag(r8,
                Operand(rbx, ArgumentsAdaptorFrameConstants::kLengthOffset));
  }
  __ bind(&arguments_done);

  Label stack_done, stack_overflow;
  __ subl(r8, rcx);
  __ j(less_equal, &stack_done);
  {
    // Check for stack overflow.
    Generate_StackOverflowCheck(masm, r8, rcx, &stack_overflow, Label::kNear);

    // Forward the arguments from the caller frame.
    {
      Label loop;
      __ addl(rax, r8);
      __ PopReturnAddressTo(rcx);
      __ bind(&loop);
      {
        StackArgumentsAccessor args(rbx, r8, ARGUMENTS_DONT_CONTAIN_RECEIVER);
        __ Push(args.GetArgumentOperand(0));
        __ decl(r8);
        __ j(not_zero, &loop);
      }
      __ PushReturnAddressFrom(rcx);
    }
  }
  __ jmp(&stack_done, Label::kNear);
  __ bind(&stack_overflow);
  __ TailCallRuntime(Runtime::kThrowStackOverflow);
  __ bind(&stack_done);

  // Tail-call to the actual Call or Construct builtin.
  __ Jump(code, RelocInfo::CODE_TARGET);
}
void Builtins::Generate_CallFunction(MacroAssembler* masm,
                                     ConvertReceiverMode mode) {
  // rax : the number of arguments (not including the receiver)
  // rdi : the function to call (checked to be a JSFunction)
  StackArgumentsAccessor args(rsp, rax);
  __ AssertFunction(rdi);

  // Class constructors are callable, but [[Call]] on them raises an exception.
  Label class_constructor;
  __ movp(rdx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
  __ testl(FieldOperand(rdx, SharedFunctionInfo::kFlagsOffset),
           Immediate(SharedFunctionInfo::IsClassConstructorBit::kMask));
  __ j(not_zero, &class_constructor);

  // Enter the context of the function; ToObject has to run in the function
  // context.
  __ movp(rsi, FieldOperand(rdi, JSFunction::kContextOffset));

  // We need to convert the receiver for non-native sloppy mode functions.
  Label done_convert;
  __ testl(FieldOperand(rdx, SharedFunctionInfo::kFlagsOffset),
           Immediate(SharedFunctionInfo::IsNativeBit::kMask |
                     SharedFunctionInfo::IsStrictBit::kMask));
  __ j(not_zero, &done_convert);
  {
    if (mode == ConvertReceiverMode::kNullOrUndefined) {
      // Patch receiver to global proxy.
      __ LoadGlobalProxy(rcx);
    } else {
      Label convert_to_object, convert_receiver;
      __ movp(rcx, args.GetReceiverOperand());
      __ JumpIfSmi(rcx, &convert_to_object, Label::kNear);
      STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
      __ CmpObjectType(rcx, FIRST_JS_RECEIVER_TYPE, rbx);
      __ j(above_equal, &done_convert);
      if (mode != ConvertReceiverMode::kNotNullOrUndefined) {
        Label convert_global_proxy;
        __ JumpIfRoot(rcx, RootIndex::kUndefinedValue, &convert_global_proxy,
                      Label::kNear);
        __ JumpIfNotRoot(rcx, RootIndex::kNullValue, &convert_to_object,
                         Label::kNear);
        __ bind(&convert_global_proxy);
        {
          // Patch receiver to global proxy.
          __ LoadGlobalProxy(rcx);
        }
        __ jmp(&convert_receiver);
      }
      __ bind(&convert_to_object);
      {
        // Convert receiver using ToObject; preserve the Smi-tagged argument
        // count, the function and the context across the call.
        FrameScope scope(masm, StackFrame::INTERNAL);
        __ SmiTag(rax, rax);
        __ Push(rax);
        __ Push(rdi);
        __ movp(rax, rcx);
        __ Push(rsi);
        __ Call(BUILTIN_CODE(masm->isolate(), ToObject),
                RelocInfo::CODE_TARGET);
        __ Pop(rsi);
        __ movp(rcx, rax);
        __ Pop(rdi);
        __ Pop(rax);
        __ SmiUntag(rax, rax);
      }
      __ movp(rdx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
      __ bind(&convert_receiver);
    }
    __ movp(args.GetReceiverOperand(), rcx);
  }
  __ bind(&done_convert);

  // Invoke the function with the expected parameter count from the
  // SharedFunctionInfo.
  __ movzxwq(
      rbx, FieldOperand(rdx, SharedFunctionInfo::kFormalParameterCountOffset));
  ParameterCount actual(rax);
  ParameterCount expected(rbx);
  __ InvokeFunctionCode(rdi, no_reg, expected, actual, JUMP_FUNCTION);

  // The function is a "classConstructor"; need to raise an exception.
  __ bind(&class_constructor);
  {
    FrameScope frame(masm, StackFrame::INTERNAL);
    __ Push(rdi);
    __ CallRuntime(Runtime::kThrowConstructorNonCallableError);
  }
}
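
// Pushes the [[BoundArguments]] of the JSBoundFunction in rdi below the
// arguments already on the stack: it reserves stack space, relocates the
// return address and existing arguments downward, then copies the bound
// arguments into the gap.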
void Generate_PushBoundArguments(MacroAssembler* masm) {
  // Load [[BoundArguments]] into rcx and the length into rbx.
  Label no_bound_arguments;
  __ movp(rcx, FieldOperand(rdi, JSBoundFunction::kBoundArgumentsOffset));
  __ SmiUntag(rbx, FieldOperand(rcx, FixedArray::kLengthOffset));
  __ testl(rbx, rbx);
  __ j(zero, &no_bound_arguments);
  {
    // Reserve stack space for the [[BoundArguments]].
    {
      Label done;
      __ leap(kScratchRegister, Operand(rbx, times_pointer_size, 0));
      __ subp(rsp, kScratchRegister);
      // Check the "real stack limit" for overflow.
      __ CompareRoot(rsp, RootIndex::kRealStackLimit);
      __ j(above_equal, &done, Label::kNear);
      // Restore the stack pointer.
      __ leap(rsp, Operand(rsp, rbx, times_pointer_size, 0));
      {
        FrameScope scope(masm, StackFrame::MANUAL);
        __ EnterFrame(StackFrame::INTERNAL);
        __ CallRuntime(Runtime::kThrowStackOverflow);
      }
      __ bind(&done);
    }

    // Adjust effective number of arguments to include return address.
    __ incl(rax);

    // Relocate arguments and return address down the stack.
    {
      Label loop;
      __ Set(rcx, 0);
      __ leap(rbx, Operand(rsp, rbx, times_pointer_size, 0));
      __ bind(&loop);
      __ movp(kScratchRegister, Operand(rbx, rcx, times_pointer_size, 0));
      __ movp(Operand(rsp, rcx, times_pointer_size, 0), kScratchRegister);
      __ incl(rcx);
      __ cmpl(rcx, rax);
      __ j(less, &loop);
    }

    // Copy [[BoundArguments]] to the stack (below the arguments).
    {
      Label loop;
      __ movp(rcx, FieldOperand(rdi, JSBoundFunction::kBoundArgumentsOffset));
      __ SmiUntag(rbx, FieldOperand(rcx, FixedArray::kLengthOffset));
      __ bind(&loop);
      __ decl(rbx);
      __ movp(kScratchRegister, FieldOperand(rcx, rbx, times_pointer_size,
                                             FixedArray::kHeaderSize));
      __ movp(Operand(rsp, rax, times_pointer_size, 0), kScratchRegister);
      __ leal(rax, Operand(rax, 1));
      __ j(greater, &loop);
    }

    // Adjust the effective number of arguments back down: rax now counts the
    // return address as well, so subtract one for it.
    __ decl(rax);
  }
  __ bind(&no_bound_arguments);
}
void Builtins::Generate_CallBoundFunctionImpl(MacroAssembler* masm) {
  // rax : the number of arguments (not including the receiver)
  // rdi : the function to call (checked to be a JSBoundFunction)
  __ AssertBoundFunction(rdi);

  // Patch the receiver to [[BoundThis]].
  StackArgumentsAccessor args(rsp, rax);
  __ movp(rbx, FieldOperand(rdi, JSBoundFunction::kBoundThisOffset));
  __ movp(args.GetReceiverOperand(), rbx);

  // Push the [[BoundArguments]] onto the stack.
  Generate_PushBoundArguments(masm);

  // Call the [[BoundTargetFunction]] via the Call builtin.
  __ movp(rdi, FieldOperand(rdi, JSBoundFunction::kBoundTargetFunctionOffset));
  __ Jump(BUILTIN_CODE(masm->isolate(), Call_ReceiverIsAny),
          RelocInfo::CODE_TARGET);
}
void Builtins::Generate_Call(MacroAssembler* masm, ConvertReceiverMode mode) {
  // rax : the number of arguments (not including the receiver)
  // rdi : the target to call (can be any Object)
  StackArgumentsAccessor args(rsp, rax);

  Label non_callable;
  __ JumpIfSmi(rdi, &non_callable);
  __ CmpObjectType(rdi, JS_FUNCTION_TYPE, rcx);
  __ Jump(masm->isolate()->builtins()->CallFunction(mode),
          RelocInfo::CODE_TARGET, equal);

  __ CmpInstanceType(rcx, JS_BOUND_FUNCTION_TYPE);
  __ Jump(BUILTIN_CODE(masm->isolate(), CallBoundFunction),
          RelocInfo::CODE_TARGET, equal);

  // Check if target has a [[Call]] internal method.
  __ testb(FieldOperand(rcx, Map::kBitFieldOffset),
           Immediate(Map::IsCallableBit::kMask));
  __ j(zero, &non_callable, Label::kNear);

  // Call the CallProxy builtin if the target is a proxy.
  __ CmpInstanceType(rcx, JS_PROXY_TYPE);
  __ Jump(BUILTIN_CODE(masm->isolate(), CallProxy), RelocInfo::CODE_TARGET,
          equal);

  // Call to something else, which might have a [[Call]] internal method.
  // Overwrite the original receiver with the (original) target.
  __ movp(args.GetReceiverOperand(), rdi);
  // Let the "call_as_function_delegate" take care of the rest.
  __ LoadNativeContextSlot(Context::CALL_AS_FUNCTION_DELEGATE_INDEX, rdi);
  __ Jump(masm->isolate()->builtins()->CallFunction(
              ConvertReceiverMode::kNotNullOrUndefined),
          RelocInfo::CODE_TARGET);

  // Call to something that is not callable.
  __ bind(&non_callable);
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ Push(rdi);
    __ CallRuntime(Runtime::kThrowCalledNonCallable);
  }
}
void Builtins::Generate_ConstructFunction(MacroAssembler* masm) {
  // rax : the number of arguments (not including the receiver)
  // rdx : the new target (checked to be a constructor)
  // rdi : the constructor to call (checked to be a JSFunction)
  __ AssertConstructor(rdi);
  __ AssertFunction(rdi);

  // Calling convention for function specific ConstructStubs requires rbx to
  // contain either an AllocationSite or undefined.
  __ LoadRoot(rbx, RootIndex::kUndefinedValue);

  // Jump to JSBuiltinsConstructStub or JSConstructStubGeneric.
  __ movp(rcx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
  __ testl(FieldOperand(rcx, SharedFunctionInfo::kFlagsOffset),
           Immediate(SharedFunctionInfo::ConstructAsBuiltinBit::kMask));
  __ Jump(BUILTIN_CODE(masm->isolate(), JSBuiltinsConstructStub),
          RelocInfo::CODE_TARGET, not_zero);

  __ Jump(BUILTIN_CODE(masm->isolate(), JSConstructStubGeneric),
          RelocInfo::CODE_TARGET);
}
void Builtins::Generate_ConstructBoundFunction(MacroAssembler* masm) {
  // rax : the number of arguments (not including the receiver)
  // rdx : the new target (checked to be a constructor)
  // rdi : the constructor to call (checked to be a JSBoundFunction)
  __ AssertConstructor(rdi);
  __ AssertBoundFunction(rdi);

  // Push the [[BoundArguments]] onto the stack.
  Generate_PushBoundArguments(masm);

  // Patch new.target to [[BoundTargetFunction]] if new.target equals target.
  {
    Label done;
    __ cmpp(rdi, rdx);
    __ j(not_equal, &done, Label::kNear);
    __ movp(rdx,
            FieldOperand(rdi, JSBoundFunction::kBoundTargetFunctionOffset));
    __ bind(&done);
  }

  // Construct the [[BoundTargetFunction]] via the Construct builtin.
  __ movp(rdi, FieldOperand(rdi, JSBoundFunction::kBoundTargetFunctionOffset));
  __ Jump(BUILTIN_CODE(masm->isolate(), Construct), RelocInfo::CODE_TARGET);
}
void Builtins::Generate_Construct(MacroAssembler* masm) {
  // rax : the number of arguments (not including the receiver)
  // rdx : the new target (either the same as the constructor or the
  //       JSFunction on which new was invoked initially)
  // rdi : the constructor to call (can be any Object)
  StackArgumentsAccessor args(rsp, rax);

  // Check if target is a Smi.
  Label non_constructor;
  __ JumpIfSmi(rdi, &non_constructor);

  // Check if target has a [[Construct]] internal method.
  __ movq(rcx, FieldOperand(rdi, HeapObject::kMapOffset));
  __ testb(FieldOperand(rcx, Map::kBitFieldOffset),
           Immediate(Map::IsConstructorBit::kMask));
  __ j(zero, &non_constructor);

  // Dispatch based on instance type.
  __ CmpInstanceType(rcx, JS_FUNCTION_TYPE);
  __ Jump(BUILTIN_CODE(masm->isolate(), ConstructFunction),
          RelocInfo::CODE_TARGET, equal);

  // Only dispatch to bound functions after checking whether they are
  // constructors.
  __ CmpInstanceType(rcx, JS_BOUND_FUNCTION_TYPE);
  __ Jump(BUILTIN_CODE(masm->isolate(), ConstructBoundFunction),
          RelocInfo::CODE_TARGET, equal);

  // Only dispatch to proxies after checking whether they are constructors.
  __ CmpInstanceType(rcx, JS_PROXY_TYPE);
  __ Jump(BUILTIN_CODE(masm->isolate(), ConstructProxy), RelocInfo::CODE_TARGET,
          equal);

  // Called Construct on an exotic Object with a [[Construct]] internal method.
  {
    // Overwrite the original receiver with the (original) target.
    __ movp(args.GetReceiverOperand(), rdi);
    // Let the "call_as_constructor_delegate" take care of the rest.
    __ LoadNativeContextSlot(Context::CALL_AS_CONSTRUCTOR_DELEGATE_INDEX, rdi);
    __ Jump(masm->isolate()->builtins()->CallFunction(),
            RelocInfo::CODE_TARGET);
  }

  // Called Construct on an Object that doesn't have a [[Construct]] internal
  // method.
  __ bind(&non_constructor);
  __ Jump(BUILTIN_CODE(masm->isolate(), ConstructedNonConstructable),
          RelocInfo::CODE_TARGET);
}
void Builtins::Generate_InterpreterOnStackReplacement(MacroAssembler* masm) {
  // Lookup the function in the JavaScript frame.
  __ movp(rax, Operand(rbp, StandardFrameConstants::kCallerFPOffset));
  __ movp(rax, Operand(rax, JavaScriptFrameConstants::kFunctionOffset));

  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    // Pass function as argument.
    __ Push(rax);
    __ CallRuntime(Runtime::kCompileForOnStackReplacement);
  }

  Label skip;
  // If the code object is null, just return to the caller.
  __ testp(rax, rax);
  __ j(not_equal, &skip, Label::kNear);
  __ ret(0);

  __ bind(&skip);

  // Drop the handler frame that is sitting on top of the actual JavaScript
  // frame.
  __ leave();

  // Load deoptimization data from the code object.
  __ movp(rbx, Operand(rax, Code::kDeoptimizationDataOffset - kHeapObjectTag));

  // Load the OSR entrypoint offset from the deoptimization data.
  __ SmiUntag(rbx, Operand(rbx, FixedArray::OffsetOfElementAt(
                                    DeoptimizationData::kOsrPcOffsetIndex) -
                                    kHeapObjectTag));

  // Compute the target address = code_obj + header_size + osr_offset.
  __ leap(rax, Operand(rax, rbx, times_1, Code::kHeaderSize - kHeapObjectTag));

  // Overwrite the return address on the stack.
  __ movq(StackOperandForReturnAddress(0), rax);

  // And "return" to the OSR entry point of the function.
  __ ret(0);
}
void Builtins::Generate_WasmCompileLazy(MacroAssembler* masm) {
  // The function index was pushed to the stack by the caller as int32.
  __ Pop(r11);
  // Convert to Smi for the runtime call.
  __ SmiTag(r11, r11);
  {
    HardAbortScope hard_abort(masm);  // Avoid calls to Abort.
    FrameScope scope(masm, StackFrame::WASM_COMPILE_LAZY);

    // Save all parameter registers (see wasm-linkage.cc). They might be
    // overwritten in the runtime call below. We don't have any callee-saved
    // registers in wasm, so no need to store anything else.
    static_assert(WasmCompileLazyFrameConstants::kNumberOfSavedGpParamRegs ==
                      arraysize(wasm::kGpParamRegisters),
                  "frame size mismatch");
    for (Register reg : wasm::kGpParamRegisters) {
      __ Push(reg);
    }
    static_assert(WasmCompileLazyFrameConstants::kNumberOfSavedFpParamRegs ==
                      arraysize(wasm::kFpParamRegisters),
                  "frame size mismatch");
    __ subp(rsp, Immediate(kSimd128Size * arraysize(wasm::kFpParamRegisters)));
    int offset = 0;
    for (DoubleRegister reg : wasm::kFpParamRegisters) {
      __ movdqu(Operand(rsp, offset), reg);
      offset += kSimd128Size;
    }

    // Push the WASM instance as an explicit argument to WasmCompileLazy.
    __ Push(kWasmInstanceRegister);
    // Push the function index as second argument.
    __ Push(r11);
    // Load the correct CEntry builtin from the instance object.
    __ movp(rcx, FieldOperand(kWasmInstanceRegister,
                              WasmInstanceObject::kCEntryStubOffset));
    // Initialize the JavaScript context with 0. CEntry will use it to set the
    // current context on the isolate.
    __ Move(kContextRegister, Smi::zero());
    __ CallRuntimeWithCEntry(Runtime::kWasmCompileLazy, rcx);
    // The entrypoint address is the return value.
    __ movq(r11, kReturnRegister0);

    // Restore registers.
    for (DoubleRegister reg : base::Reversed(wasm::kFpParamRegisters)) {
      offset -= kSimd128Size;
      __ movdqu(reg, Operand(rsp, offset));
    }
    DCHECK_EQ(0, offset);
    __ addp(rsp, Immediate(kSimd128Size * arraysize(wasm::kFpParamRegisters)));
    for (Register reg : base::Reversed(wasm::kGpParamRegisters)) {
      __ Pop(reg);
    }
  }
  // Finally, jump to the entrypoint.
  __ jmp(r11);
}
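
// The C entry stub. As set up below, rax holds the argument count including
// the receiver and rbx the C function to call; r14 (argc) and r15 (argv) are
// forwarded to the C function after the exit frame is entered. Results come
// back in rax (and rdx for two-register results); larger results are written
// through a hidden pointer argument.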
void Builtins::Generate_CEntry(MacroAssembler* masm, int result_size,
                               SaveFPRegsMode save_doubles, ArgvMode argv_mode,
                               bool builtin_exit_frame) {
#ifdef _WIN64
  // Windows 64-bit ABI passes arguments in rcx, rdx, r8, r9. It requires the
  // stack to be aligned to 16 bytes. It only allows a single word to be
  // returned in register rax. Larger return sizes must be written to an
  // address passed as a hidden first argument.
  const Register kCCallArg0 = rcx;
  const Register kCCallArg1 = rdx;
  const Register kCCallArg2 = r8;
  const Register kCCallArg3 = r9;
  const int kArgExtraStackSpace = 2;
  const int kMaxRegisterResultSize = 1;
#else
  // GCC / X64 calling convention.
  const Register kCCallArg0 = rdi;
  const Register kCCallArg1 = rsi;
  const Register kCCallArg2 = rdx;
  const Register kCCallArg3 = rcx;
  const int kArgExtraStackSpace = 0;
  const int kMaxRegisterResultSize = 2;
#endif  // _WIN64

  // Enter the exit frame that transitions from JavaScript to C++.
  int arg_stack_space =
      kArgExtraStackSpace +
      (result_size <= kMaxRegisterResultSize ? 0 : result_size);
  if (argv_mode == kArgvInRegister) {
    DCHECK(save_doubles == kDontSaveFPRegs);
    DCHECK(!builtin_exit_frame);
    __ EnterApiExitFrame(arg_stack_space);
    // Move argc into r14 (argv is already in r15).
    __ movp(r14, rax);
  } else {
    __ EnterExitFrame(
        arg_stack_space, save_doubles == kSaveFPRegs,
        builtin_exit_frame ? StackFrame::BUILTIN_EXIT : StackFrame::EXIT);
  }

  // Check stack alignment.
  if (FLAG_debug_code) {
    __ CheckStackAlignment();
  }

  // Call the C function.
  if (result_size <= kMaxRegisterResultSize) {
    // Pass a pointer to the Arguments object as the first argument.
    // Return result in single register (rax), or a register pair (rax, rdx).
    __ movp(kCCallArg0, r14);  // argc.
    __ movp(kCCallArg1, r15);  // argv.
    __ Move(kCCallArg2, ExternalReference::isolate_address(masm->isolate()));
  } else {
    DCHECK_LE(result_size, 2);
    // Pass a pointer to the result location as the first argument.
    __ leap(kCCallArg0, StackSpaceOperand(kArgExtraStackSpace));
    // Pass a pointer to the Arguments object as the second argument.
    __ movp(kCCallArg1, r14);  // argc.
    __ movp(kCCallArg2, r15);  // argv.
    __ Move(kCCallArg3, ExternalReference::isolate_address(masm->isolate()));
  }
  __ call(rbx);

  if (result_size > kMaxRegisterResultSize) {
    // Read result values stored on stack.
    DCHECK_LE(result_size, 2);
    __ movq(kReturnRegister0, StackSpaceOperand(kArgExtraStackSpace + 0));
    __ movq(kReturnRegister1, StackSpaceOperand(kArgExtraStackSpace + 1));
  }

  // Check the result for the exception sentinel.
  Label exception_returned;
  __ CompareRoot(rax, RootIndex::kException);
  __ j(equal, &exception_returned);

  // Check that there is no pending exception, otherwise we should have
  // returned the exception sentinel.
  if (FLAG_debug_code) {
    Label okay;
    __ LoadRoot(r14, RootIndex::kTheHoleValue);
    ExternalReference pending_exception_address = ExternalReference::Create(
        IsolateAddressId::kPendingExceptionAddress, masm->isolate());
    Operand pending_exception_operand =
        masm->ExternalReferenceAsOperand(pending_exception_address);
    __ cmpp(r14, pending_exception_operand);
    __ j(equal, &okay, Label::kNear);
    __ int3();
    __ bind(&okay);
  }

  // Exit the JavaScript to C++ exit frame.
  __ LeaveExitFrame(save_doubles == kSaveFPRegs, argv_mode == kArgvOnStack);
  __ ret(0);

  // Handling of exception.
  __ bind(&exception_returned);

  ExternalReference pending_handler_context_address = ExternalReference::Create(
      IsolateAddressId::kPendingHandlerContextAddress, masm->isolate());
  ExternalReference pending_handler_entrypoint_address =
      ExternalReference::Create(
          IsolateAddressId::kPendingHandlerEntrypointAddress, masm->isolate());
  ExternalReference pending_handler_fp_address = ExternalReference::Create(
      IsolateAddressId::kPendingHandlerFPAddress, masm->isolate());
  ExternalReference pending_handler_sp_address = ExternalReference::Create(
      IsolateAddressId::kPendingHandlerSPAddress, masm->isolate());

  // Ask the runtime for help to determine the handler. This will set rax to
  // contain the current pending exception, don't clobber it.
  ExternalReference find_handler =
      ExternalReference::Create(Runtime::kUnwindAndFindExceptionHandler);
  {
    FrameScope scope(masm, StackFrame::MANUAL);
    __ movp(arg_reg_1, Immediate(0));
    __ movp(arg_reg_2, Immediate(0));
    __ Move(arg_reg_3, ExternalReference::isolate_address(masm->isolate()));
    __ PrepareCallCFunction(3);
    __ CallCFunction(find_handler, 3);
  }

  // Retrieve the handler context, SP and FP.
  __ movp(rsi,
          masm->ExternalReferenceAsOperand(pending_handler_context_address));
  __ movp(rsp, masm->ExternalReferenceAsOperand(pending_handler_sp_address));
  __ movp(rbp, masm->ExternalReferenceAsOperand(pending_handler_fp_address));

  // If the handler is a JS frame, restore the context to the frame. Note that
  // the context will be set to (rsi == 0) for non-JS frames.
  Label skip;
  __ testp(rsi, rsi);
  __ j(zero, &skip, Label::kNear);
  __ movp(Operand(rbp, StandardFrameConstants::kContextOffset), rsi);
  __ bind(&skip);

  // Reset the masking register.
  __ ResetSpeculationPoisonRegister();

  // Compute the handler entry address and jump to it.
  __ movp(rdi,
          masm->ExternalReferenceAsOperand(pending_handler_entrypoint_address));
  __ jmp(rdi);
}
void Builtins::Generate_DoubleToI(MacroAssembler* masm) {
  Label check_negative, process_64_bits, done;

  // Account for return address and saved registers.
  const int kArgumentOffset = 4 * kRegisterSize;

  MemOperand mantissa_operand(MemOperand(rsp, kArgumentOffset));
  MemOperand exponent_operand(
      MemOperand(rsp, kArgumentOffset + kDoubleSize / 2));

  // The result is returned on the stack.
  MemOperand return_operand = mantissa_operand;

  Register scratch1 = rbx;

  // Since we must use rcx for shifts below, use some other register (rax)
  // to calculate the result.
  Register result_reg = rax;
  // Save the registers clobbered below.
  Register save_reg = rax;
  __ pushq(rcx);
  __ pushq(scratch1);
  __ pushq(save_reg);

  __ movl(scratch1, mantissa_operand);
  __ Movsd(kScratchDoubleReg, mantissa_operand);
  __ movl(rcx, exponent_operand);

  __ andl(rcx, Immediate(HeapNumber::kExponentMask));
  __ shrl(rcx, Immediate(HeapNumber::kExponentShift));
  __ leal(result_reg, MemOperand(rcx, -HeapNumber::kExponentBias));
  __ cmpl(result_reg, Immediate(HeapNumber::kMantissaBits));
  __ j(below, &process_64_bits, Label::kNear);

  // Result is entirely in lower 32 bits of mantissa.
  int delta = HeapNumber::kExponentBias + Double::kPhysicalSignificandSize;
  __ subl(rcx, Immediate(delta));
  __ xorl(result_reg, result_reg);
  __ cmpl(rcx, Immediate(31));
  __ j(above, &done, Label::kNear);
  __ shll_cl(scratch1);
  __ jmp(&check_negative, Label::kNear);

  __ bind(&process_64_bits);
  __ Cvttsd2siq(result_reg, kScratchDoubleReg);
  __ jmp(&done, Label::kNear);

  // If the double was negative, negate the integer result.
  __ bind(&check_negative);
  __ movl(result_reg, scratch1);
  __ negl(result_reg);
  __ cmpl(exponent_operand, Immediate(0));
  __ cmovl(greater, result_reg, scratch1);

  // Restore registers.
  __ bind(&done);
  __ movl(return_operand, result_reg);
  __ popq(save_reg);
  __ popq(scratch1);
  __ popq(rcx);
  __ ret(0);
}
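
// Computes base^exponent. Per the register constants below, the base arrives
// in xmm2 and the exponent in xmm1 (or rdx on the integer path); the result
// is produced in xmm3, with a fallback to the C library power function when
// the fast paths bail out.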
void Builtins::Generate_MathPowInternal(MacroAssembler* masm) {
  const Register exponent = rdx;
  const Register scratch = rcx;
  const XMMRegister double_result = xmm3;
  const XMMRegister double_base = xmm2;
  const XMMRegister double_exponent = xmm1;
  const XMMRegister double_scratch = xmm4;

  Label call_runtime, done, exponent_not_smi, int_exponent;

  // Save 1 in double_result - we need this several times later on.
  __ movp(scratch, Immediate(1));
  __ Cvtlsi2sd(double_result, scratch);

  Label fast_power, try_arithmetic_simplification;
  // Detect integer exponents stored as double.
  __ DoubleToI(exponent, double_exponent, double_scratch,
               &try_arithmetic_simplification, &try_arithmetic_simplification);
  __ jmp(&int_exponent);

  __ bind(&try_arithmetic_simplification);
  // Skip to runtime if possibly NaN (indicated by the indefinite integer).
  __ Cvttsd2si(exponent, double_exponent);
  __ cmpl(exponent, Immediate(0x1));
  __ j(overflow, &call_runtime);

  // Using FPU instructions to calculate power.
  Label fast_power_failed;
  __ bind(&fast_power);
  __ fnclex();  // Clear flags to catch exceptions later.
  // Transfer (B)ase and (E)xponent onto the FPU register stack.
  __ subp(rsp, Immediate(kDoubleSize));
  __ Movsd(Operand(rsp, 0), double_exponent);
  __ fld_d(Operand(rsp, 0));  // E
  __ Movsd(Operand(rsp, 0), double_base);
  __ fld_d(Operand(rsp, 0));  // B, E

  // Exponent is in st(1) and base is in st(0):
  // B ^ E = (2^(E * log2(B)) - 1) + 1 = (2^X - 1) + 1 for X = E * log2(B)
  __ fyl2x();    // X
  __ fld(0);     // X, X
  __ frndint();  // rnd(X), X
  __ fsub(1);    // rnd(X), X-rnd(X)
  __ fxch(1);    // X - rnd(X), rnd(X)
  __ f2xm1();    // 2^(X-rnd(X)) - 1, rnd(X)
  __ fld1();     // 1, 2^(X-rnd(X)) - 1, rnd(X)
  __ faddp(1);   // 2^(X-rnd(X)), rnd(X)
  __ fscale();   // 2^X, rnd(X)
  __ fstp(1);
  // Bail out to runtime in case of exceptions in the status word.
  __ fnstsw_ax();
  __ testb(rax, Immediate(0x5F));  // Check for all but precision exception.
  __ j(not_zero, &fast_power_failed, Label::kNear);
  __ fstp_d(Operand(rsp, 0));
  __ Movsd(double_result, Operand(rsp, 0));
  __ addp(rsp, Immediate(kDoubleSize));
  __ jmp(&done);

  __ bind(&fast_power_failed);
  __ fninit();
  __ addp(rsp, Immediate(kDoubleSize));
  __ jmp(&call_runtime);

  // Calculate power with integer exponent.
  __ bind(&int_exponent);
  const XMMRegister double_scratch2 = double_exponent;
  // Back up exponent; we need to check its sign later.
  __ movp(scratch, exponent);
  __ Movsd(double_scratch, double_base);     // Back up base.
  __ Movsd(double_scratch2, double_result);  // Load double_exponent with 1.

  // Get absolute value of exponent.
  Label no_neg, while_true, while_false;
  __ testl(scratch, scratch);
  __ j(positive, &no_neg, Label::kNear);
  __ negl(scratch);
  __ bind(&no_neg);

  __ j(zero, &while_false, Label::kNear);
  __ shrl(scratch, Immediate(1));
  // Above condition means CF==0 && ZF==0. This means that the bit that has
  // been shifted out is 0 and the result is not 0.
  __ j(above, &while_true, Label::kNear);
  __ Movsd(double_result, double_scratch);
  __ j(zero, &while_false, Label::kNear);

  __ bind(&while_true);
  __ shrl(scratch, Immediate(1));
  __ Mulsd(double_scratch, double_scratch);
  __ j(above, &while_true, Label::kNear);
  __ Mulsd(double_result, double_scratch);
  __ j(not_zero, &while_true);

  __ bind(&while_false);
  // If the exponent is negative, return 1/result.
  __ testl(exponent, exponent);
  __ j(greater, &done);
  __ Divsd(double_scratch2, double_result);
  __ Movsd(double_result, double_scratch2);
  // Test whether result is zero. Bail out to check for subnormal result.
  // Due to subnormals, x^-y == (1/x)^y does not hold in all cases.
  __ Xorpd(double_scratch2, double_scratch2);
  __ Ucomisd(double_scratch2, double_result);
  // double_exponent aliased as double_scratch2 has already been overwritten,
  // but we may still have enough precision for 1.0/double_result.
  __ j(not_equal, &done);
  __ Cvtlsi2sd(double_exponent, exponent);

  // Returning or bailing out.
  __ bind(&call_runtime);
  // Move base to the correct argument register. Exponent is already in xmm1.
  __ Movsd(xmm0, double_base);
  DCHECK(double_exponent == xmm1);
  {
    AllowExternalCallThatCantCauseGC scope(masm);
    __ PrepareCallCFunction(2);
    __ CallCFunction(ExternalReference::power_double_double_function(), 2);
  }
  // Return value is in xmm0.
  __ Movsd(double_result, xmm0);

  __ bind(&done);
  __ ret(0);
}
void GenerateInternalArrayConstructorCase(MacroAssembler* masm,
                                          ElementsKind kind) {
  Label not_zero_case, not_one_case;
  Label normal_sequence;

  __ testp(rax, rax);
  __ j(not_zero, &not_zero_case);
  __ Jump(CodeFactory::InternalArrayNoArgumentConstructor(masm->isolate(), kind)
              .code(),
          RelocInfo::CODE_TARGET);

  __ bind(&not_zero_case);
  __ cmpl(rax, Immediate(1));
  __ j(greater, &not_one_case);

  if (IsFastPackedElementsKind(kind)) {
    // We might need to create a holey array; look at the first argument.
    StackArgumentsAccessor args(rsp, 1, ARGUMENTS_DONT_CONTAIN_RECEIVER);
    __ movp(rcx, args.GetArgumentOperand(0));
    __ testp(rcx, rcx);
    __ j(zero, &normal_sequence);

    __ Jump(CodeFactory::InternalArraySingleArgumentConstructor(
                masm->isolate(), GetHoleyElementsKind(kind))
                .code(),
            RelocInfo::CODE_TARGET);
  }

  __ bind(&normal_sequence);
  __ Jump(
      CodeFactory::InternalArraySingleArgumentConstructor(masm->isolate(), kind)
          .code(),
      RelocInfo::CODE_TARGET);

  __ bind(&not_one_case);
  // Load undefined into the allocation site parameter as required by
  // ArrayNArgumentsConstructor.
  __ LoadRoot(kJavaScriptCallExtraArg1Register, RootIndex::kUndefinedValue);
  Handle<Code> code = BUILTIN_CODE(masm->isolate(), ArrayNArgumentsConstructor);
  __ Jump(code, RelocInfo::CODE_TARGET);
}
void Builtins::Generate_InternalArrayConstructorImpl(MacroAssembler* masm) {
  // rax : argc, rdi : constructor; the caller's arguments follow the return
  // address on the stack.
  if (FLAG_debug_code) {
    // The array construct code is only set for the global and natives builtin
    // Array functions, which always have maps.

    // Initial map for the builtin Array function should be a map.
    __ movp(rcx, FieldOperand(rdi, JSFunction::kPrototypeOrInitialMapOffset));
    // Will both indicate a nullptr and a Smi.
    STATIC_ASSERT(kSmiTag == 0);
    Condition not_smi = NegateCondition(masm->CheckSmi(rcx));
    __ Check(not_smi, AbortReason::kUnexpectedInitialMapForArrayFunction);
    __ CmpObjectType(rcx, MAP_TYPE, rcx);
    __ Check(equal, AbortReason::kUnexpectedInitialMapForArrayFunction);
  }

  // Figure out the right elements kind.
  __ movp(rcx, FieldOperand(rdi, JSFunction::kPrototypeOrInitialMapOffset));

  // Load the map's "bit field 2" into rcx.
  __ movzxbp(rcx, FieldOperand(rcx, Map::kBitField2Offset));
  // Retrieve elements_kind from bit field 2.
  __ DecodeField<Map::ElementsKindBits>(rcx);

  if (FLAG_debug_code) {
    Label done;
    __ cmpl(rcx, Immediate(PACKED_ELEMENTS));
    __ j(equal, &done);
    __ cmpl(rcx, Immediate(HOLEY_ELEMENTS));
    __ Assert(
        equal,
        AbortReason::kInvalidElementsKindForInternalArrayOrInternalPackedArray);
    __ bind(&done);
  }

  Label fast_elements_case;
  __ cmpl(rcx, Immediate(PACKED_ELEMENTS));
  __ j(equal, &fast_elements_case);
  GenerateInternalArrayConstructorCase(masm, HOLEY_ELEMENTS);

  __ bind(&fast_elements_case);
  GenerateInternalArrayConstructorCase(masm, PACKED_ELEMENTS);
}

#undef __

}  // namespace internal
}  // namespace v8

#endif  // V8_TARGET_ARCH_X64