#if V8_TARGET_ARCH_S390

#include "src/assembler-inl.h"
#include "src/code-factory.h"
#include "src/code-stubs.h"
#include "src/counters.h"
#include "src/debug/debug.h"
#include "src/deoptimizer.h"
#include "src/frame-constants.h"
#include "src/frames.h"
#include "src/objects/js-generator.h"
#include "src/objects/smi.h"
#include "src/register-configuration.h"
#include "src/runtime/runtime.h"
#include "src/wasm/wasm-objects.h"

namespace v8 {
namespace internal {

#define __ ACCESS_MASM(masm)

void Builtins::Generate_Adaptor(MacroAssembler* masm, Address address,
                                ExitFrameType exit_frame_type) {
  __ Move(kJavaScriptCallExtraArg1Register, ExternalReference::Create(address));
  if (exit_frame_type == BUILTIN_EXIT) {
    __ Jump(BUILTIN_CODE(masm->isolate(), AdaptorWithBuiltinExitFrame),
            RelocInfo::CODE_TARGET);
  } else {
    DCHECK(exit_frame_type == EXIT);
    __ Jump(BUILTIN_CODE(masm->isolate(), AdaptorWithExitFrame),
            RelocInfo::CODE_TARGET);
  }
}
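// Entry point for the InternalArray constructor builtins: verifies the
// initial map in debug builds, then defers to InternalArrayConstructorImpl.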
void Builtins::Generate_InternalArrayConstructor(MacroAssembler* masm) {
  Label generic_array_code, one_or_more_arguments, two_or_more_arguments;

  if (FLAG_debug_code) {
    // Initial map for the builtin InternalArray functions should be maps.
    __ LoadP(r4, FieldMemOperand(r3, JSFunction::kPrototypeOrInitialMapOffset));
    __ TestIfSmi(r4);
    __ Assert(ne, AbortReason::kUnexpectedInitialMapForInternalArrayFunction,
              cr0);
    __ CompareObjectType(r4, r5, r6, MAP_TYPE);
    __ Assert(eq, AbortReason::kUnexpectedInitialMapForInternalArrayFunction);
  }

  __ Jump(BUILTIN_CODE(masm->isolate(), InternalArrayConstructorImpl),
          RelocInfo::CODE_TARGET);
}
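// Calls the runtime function |function_id| and tail-calls the code object it
// returns, preserving r2 (argc), r3 (target) and r5 (new target) across the
// runtime call.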
static void GenerateTailCallToReturnedCode(MacroAssembler* masm,
                                           Runtime::FunctionId function_id) {
  {
    FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
    // Push a copy of the target function and the new target; push the
    // function again as an argument to the runtime call.
    __ Push(r2, r3, r5, r3);

    __ CallRuntime(function_id, 1);
    __ LoadRR(r4, r2);

    // Restore target function and new target.
    __ Pop(r2, r3, r5);
    __ SmiUntag(r2);
  }
  static_assert(kJavaScriptCallCodeStartRegister == r4, "ABI mismatch");
  __ AddP(r4, r4, Operand(Code::kHeaderSize - kHeapObjectTag));
  __ JumpToJSEntry(r4);
}
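// Construct stub used by builtin (non-JS) constructors: sets up a construct
// frame, copies the arguments, invokes the constructor and drops the caller
// arguments on return.
// State on entry: r2 -- argc; r3 -- constructor; r5 -- new target;
// cp -- context; sp[...] -- constructor arguments.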
void Generate_JSBuiltinsConstructStubHelper(MacroAssembler* masm) {
  Label post_instantiation_deopt_entry;

  // Enter a construct frame.
  {
    FrameAndConstantPoolScope scope(masm, StackFrame::CONSTRUCT);

    // Preserve the incoming parameters on the stack.
    __ SmiTag(r2);
    __ Push(cp, r2);
    __ SmiUntag(r2);
    // The receiver for the builtin/api call.
    __ PushRoot(RootIndex::kTheHoleValue);
    // Set up pointer to last argument.
    __ la(r6, MemOperand(fp, StandardFrameConstants::kCallerSPOffset));

    // Copy arguments and receiver to the expression stack.
    Label loop, no_args;
    __ CmpP(r2, Operand::Zero());
    __ beq(&no_args);
    __ ShiftLeftP(ip, r2, Operand(kPointerSizeLog2));
    __ SubP(sp, sp, ip);
    __ LoadRR(r1, r2);
    __ bind(&loop);
    __ lay(ip, MemOperand(ip, -kPointerSize));
    __ LoadP(r0, MemOperand(ip, r6));
    __ StoreP(r0, MemOperand(ip, sp));
    __ BranchOnCount(r1, &loop);
    __ bind(&no_args);

    // Call the function.
    ParameterCount actual(r2);
    __ InvokeFunction(r3, r5, actual, CALL_FUNCTION);

    // Restore context and smi-tagged arguments count from the frame.
    __ LoadP(cp, MemOperand(fp, ConstructFrameConstants::kContextOffset));
    __ LoadP(r3, MemOperand(fp, ConstructFrameConstants::kLengthOffset));
    // Leave construct frame.
  }

  // Remove caller arguments from the stack and return.
  STATIC_ASSERT(kSmiTagSize == 1 && kSmiTag == 0);
  __ SmiToPtrArrayOffset(r3, r3);
  __ AddP(sp, sp, r3);
  __ AddP(sp, sp, Operand(kPointerSize));
  __ Ret();
}
void Generate_StackOverflowCheck(MacroAssembler* masm, Register num_args,
                                 Register scratch, Label* stack_overflow) {
  // Check the stack for overflow. We are not trying to catch interruptions
  // (e.g. debug break and preemption) here, so the "real stack limit" is
  // checked.
  __ LoadRoot(scratch, RootIndex::kRealStackLimit);
  // Make scratch the space we have left. The stack might already be
  // overflowed here which will cause scratch to become negative.
  __ SubP(scratch, sp, scratch);
  // Check if the arguments will overflow the stack.
  __ ShiftLeftP(r0, num_args, Operand(kPointerSizeLog2));
  __ CmpP(scratch, r0);
  __ ble(stack_overflow);  // Signed comparison.
}
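// The generic JS construct stub: allocates the implicit receiver when needed,
// invokes the constructor and selects between the returned value and the
// implicit receiver (ES6 9.2.2 [[Construct]]).
// State on entry: r2 -- argc; r3 -- constructor; r5 -- new target;
// cp -- context; sp[...] -- constructor arguments.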
void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) {
  // Enter a construct frame.
  {
    FrameAndConstantPoolScope scope(masm, StackFrame::CONSTRUCT);
    Label post_instantiation_deopt_entry, not_create_implicit_receiver;

    // Preserve the incoming parameters on the stack.
    __ SmiTag(r2);
    __ Push(cp, r2, r3);
    __ PushRoot(RootIndex::kUndefinedValue);
    __ Push(r5);

    // Derived class constructors get the hole instead of a fresh receiver.
    __ LoadP(r6, FieldMemOperand(r3, JSFunction::kSharedFunctionInfoOffset));
    __ LoadlW(r6, FieldMemOperand(r6, SharedFunctionInfo::kFlagsOffset));
    __ TestBitMask(r6, SharedFunctionInfo::IsDerivedConstructorBit::kMask, r0);
    __ bne(&not_create_implicit_receiver);

    // If not derived class constructor: Allocate the new receiver object.
    __ IncrementCounter(masm->isolate()->counters()->constructed_objects(), 1,
                        r6, r7);
    __ Call(BUILTIN_CODE(masm->isolate(), FastNewObject),
            RelocInfo::CODE_TARGET);
    __ b(&post_instantiation_deopt_entry);

    // Else: use TheHoleValue as receiver for constructor call.
    __ bind(&not_create_implicit_receiver);
    __ LoadRoot(r2, RootIndex::kTheHoleValue);

    // Deoptimizer enters here.
    masm->isolate()->heap()->SetConstructStubCreateDeoptPCOffset(
        masm->pc_offset());
    __ bind(&post_instantiation_deopt_entry);

    // Restore new target; push the receiver twice (one copy to return, one
    // to be consumed by the callee).
    __ Pop(r5);
    __ Push(r2, r2);

    // Restore constructor function and argument count.
    __ LoadP(r3, MemOperand(fp, ConstructFrameConstants::kConstructorOffset));
    __ LoadP(r2, MemOperand(fp, ConstructFrameConstants::kLengthOffset));
    __ SmiUntag(r2);

    // Set up pointer to last argument.
    __ la(r6, MemOperand(fp, StandardFrameConstants::kCallerSPOffset));

    Label enough_stack_space, stack_overflow;
    Generate_StackOverflowCheck(masm, r2, r7, &stack_overflow);
    __ b(&enough_stack_space);

    __ bind(&stack_overflow);
    // Restore the context from the frame.
    __ LoadP(cp, MemOperand(fp, ConstructFrameConstants::kContextOffset));
    __ CallRuntime(Runtime::kThrowStackOverflow);
    // Unreachable code.
    __ bkpt(0);

    __ bind(&enough_stack_space);

    // Copy arguments and receiver to the expression stack.
    Label loop, no_args;
    __ CmpP(r2, Operand::Zero());
    __ beq(&no_args);
    __ ShiftLeftP(ip, r2, Operand(kPointerSizeLog2));
    __ SubP(sp, sp, ip);
    __ LoadRR(r1, r2);
    __ bind(&loop);
    __ lay(ip, MemOperand(ip, -kPointerSize));
    __ LoadP(r0, MemOperand(ip, r6));
    __ StoreP(r0, MemOperand(ip, sp));
    __ BranchOnCount(r1, &loop);
    __ bind(&no_args);

    // Call the function.
    ParameterCount actual(r2);
    __ InvokeFunction(r3, r5, actual, CALL_FUNCTION);

    // Deoptimizer re-enters here after the constructor call.
    masm->isolate()->heap()->SetConstructStubInvokeDeoptPCOffset(
        masm->pc_offset());

    // Restore the context from the frame.
    __ LoadP(cp, MemOperand(fp, ConstructFrameConstants::kContextOffset));

    // If the result is an object (in the ECMA sense), we should get rid
    // of the receiver and use the result; see ECMA-262 section 13.2.2-7.
    Label use_receiver, do_throw, leave_frame;

    // If the result is undefined or a smi, use the implicit receiver.
    __ JumpIfRoot(r2, RootIndex::kUndefinedValue, &use_receiver);
    __ JumpIfSmi(r2, &use_receiver);

    // If the type of the result (stored in its map) is less than
    // FIRST_JS_RECEIVER_TYPE, it is not an object in the ECMA sense.
    STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
    __ CompareObjectType(r2, r6, r6, FIRST_JS_RECEIVER_TYPE);
    __ bge(&leave_frame);

    __ bind(&do_throw);
    __ CallRuntime(Runtime::kThrowConstructorReturnedNonObject);

    // Throw away the result of the constructor invocation and use the
    // on-stack receiver as the result.
    __ bind(&use_receiver);
    __ LoadP(r2, MemOperand(sp));
    __ JumpIfRoot(r2, RootIndex::kTheHoleValue, &do_throw);

    __ bind(&leave_frame);
    // Restore smi-tagged arguments count from the frame.
    __ LoadP(r3, MemOperand(fp, ConstructFrameConstants::kLengthOffset));
    // Leave construct frame.
  }

  // Remove caller arguments from the stack and return.
  STATIC_ASSERT(kSmiTagSize == 1 && kSmiTag == 0);
  __ SmiToPtrArrayOffset(r3, r3);
  __ AddP(sp, sp, r3);
  __ AddP(sp, sp, Operand(kPointerSize));
  __ Ret();
}
void Builtins::Generate_JSBuiltinsConstructStub(MacroAssembler* masm) {
  Generate_JSBuiltinsConstructStubHelper(masm);
}

static void GetSharedFunctionInfoBytecode(MacroAssembler* masm,
                                          Register sfi_data,
                                          Register scratch1) {
  Label done;

  __ CompareObjectType(sfi_data, scratch1, scratch1, INTERPRETER_DATA_TYPE);
  __ bne(&done, Label::kNear);
  __ LoadP(sfi_data,
           FieldMemOperand(sfi_data, InterpreterData::kBytecodeArrayOffset));

  __ bind(&done);
}
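// Resumes a suspended generator.
// State on entry: r2 -- the value to pass to the generator; r3 -- the
// JSGeneratorObject to resume; r14 -- return address.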
void Builtins::Generate_ResumeGeneratorTrampoline(MacroAssembler* masm) {
  __ AssertGeneratorObject(r3);

  // Store input value into generator object.
  __ StoreP(r2, FieldMemOperand(r3, JSGeneratorObject::kInputOrDebugPosOffset),
            r0);
  __ RecordWriteField(r3, JSGeneratorObject::kInputOrDebugPosOffset, r2, r5,
                      kLRHasNotBeenSaved, kDontSaveFPRegs);

  // Load suspended function and context.
  __ LoadP(r6, FieldMemOperand(r3, JSGeneratorObject::kFunctionOffset));
  __ LoadP(cp, FieldMemOperand(r6, JSFunction::kContextOffset));

  // Flood function if we are stepping.
  Label prepare_step_in_if_stepping, prepare_step_in_suspended_generator;
  Label stepping_prepared;
  ExternalReference debug_hook =
      ExternalReference::debug_hook_on_function_call_address(masm->isolate());
  __ Move(ip, debug_hook);
  __ LoadB(ip, MemOperand(ip));
  __ CmpSmiLiteral(ip, Smi::zero(), r0);
  __ bne(&prepare_step_in_if_stepping);

  // Flood function if we need to continue stepping in the suspended
  // generator.
  ExternalReference debug_suspended_generator =
      ExternalReference::debug_suspended_generator_address(masm->isolate());

  __ Move(ip, debug_suspended_generator);
  __ LoadP(ip, MemOperand(ip));
  __ CmpP(ip, r3);
  __ beq(&prepare_step_in_suspended_generator);
  __ bind(&stepping_prepared);

  // Check the stack for overflow; the "real stack limit" is checked since we
  // are not trying to catch interruptions here.
  Label stack_overflow;
  __ CompareRoot(sp, RootIndex::kRealStackLimit);
  __ blt(&stack_overflow);

  // Push receiver.
  __ LoadP(ip, FieldMemOperand(r3, JSGeneratorObject::kReceiverOffset));
  __ Push(ip);

  // Copy the function arguments from the generator object's register file.
  __ LoadP(r5, FieldMemOperand(r6, JSFunction::kSharedFunctionInfoOffset));
  __ LoadLogicalHalfWordP(
      r5, FieldMemOperand(r5, SharedFunctionInfo::kFormalParameterCountOffset));
  __ LoadP(r4, FieldMemOperand(
                   r3, JSGeneratorObject::kParametersAndRegistersOffset));
  {
    Label loop, done_loop;
    __ ShiftLeftP(r5, r5, Operand(kPointerSizeLog2));
    __ SubP(sp, sp, r5);

    // ip = stack offset, r5 = parameter array offset.
    __ LoadImmP(ip, Operand::Zero());
    __ SubP(r5, Operand(kPointerSize));
    __ blt(&done_loop);

    __ lgfi(r1, Operand(-kPointerSize));

    __ bind(&loop);
    __ LoadP(r0, FieldMemOperand(r4, r5, FixedArray::kHeaderSize));
    __ StoreP(r0, MemOperand(sp, ip));

    // Advance the stack offset, decrement the array offset.
    __ lay(ip, MemOperand(ip, kPointerSize));
    __ BranchRelativeOnIdxHighP(r5, r1, &loop);

    __ bind(&done_loop);
  }

  // Underlying function needs to have bytecode available.
  if (FLAG_debug_code) {
    __ LoadP(r5, FieldMemOperand(r6, JSFunction::kSharedFunctionInfoOffset));
    __ LoadP(r5, FieldMemOperand(r5, SharedFunctionInfo::kFunctionDataOffset));
    GetSharedFunctionInfoBytecode(masm, r5, ip);
    __ CompareObjectType(r5, r5, r5, BYTECODE_ARRAY_TYPE);
    __ Assert(eq, AbortReason::kMissingBytecodeArray);
  }

  // Resume (Ignition/interpreted) generator object.
  {
    // We abuse new.target both to indicate that this is a resume call and to
    // pass in the generator object. In ordinary calls, new.target is always
    // undefined because generator functions are non-constructable.
    __ LoadRR(r5, r3);
    __ LoadRR(r3, r6);
    static_assert(kJavaScriptCallCodeStartRegister == r4, "ABI mismatch");
    __ LoadP(r4, FieldMemOperand(r3, JSFunction::kCodeOffset));
    __ AddP(r4, r4, Operand(Code::kHeaderSize - kHeapObjectTag));
    __ JumpToJSEntry(r4);
  }

  __ bind(&prepare_step_in_if_stepping);
  {
    FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
    __ Push(r3, r6);
    // Push hole as receiver since we do not have a receiver yet.
    __ PushRoot(RootIndex::kTheHoleValue);
    __ CallRuntime(Runtime::kDebugOnFunctionCall);
    __ Pop(r3);
    __ LoadP(r6, FieldMemOperand(r3, JSGeneratorObject::kFunctionOffset));
  }
  __ b(&stepping_prepared);

  __ bind(&prepare_step_in_suspended_generator);
  {
    FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
    __ Push(r3);
    __ CallRuntime(Runtime::kDebugPrepareStepInSuspendedGenerator);
    __ Pop(r3);
    __ LoadP(r6, FieldMemOperand(r3, JSGeneratorObject::kFunctionOffset));
  }
  __ b(&stepping_prepared);

  __ bind(&stack_overflow);
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ CallRuntime(Runtime::kThrowStackOverflow);
    __ bkpt(0);  // This should be unreachable.
  }
}
void Builtins::Generate_ConstructedNonConstructable(MacroAssembler* masm) {
  FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
  __ Push(r3);
  __ CallRuntime(Runtime::kThrowConstructedNonConstructable);
}
static void Generate_CheckStackOverflow(MacroAssembler* masm, Register argc) {
  // Check the stack for overflow. We are not trying to catch interruptions
  // (e.g. debug break and preemption) here, so the "real stack limit" is
  // checked. Clobbers r4.
  Label okay;
  __ LoadRoot(r4, RootIndex::kRealStackLimit);
  // Make r4 the space we have left. The stack might already be overflowed
  // here which will cause r4 to become negative.
  __ SubP(r4, sp, r4);
  // Check if the arguments will overflow the stack.
  __ ShiftLeftP(r0, argc, Operand(kPointerSizeLog2));
  __ CmpP(r4, r0);
  __ bgt(&okay);  // Signed comparison.

  // Out of stack space.
  __ CallRuntime(Runtime::kThrowStackOverflow);

  __ bind(&okay);
}
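// Shared body of the JSEntry/JSConstructEntry trampolines.
// State on entry: r2 -- new.target; r3 -- function; r4 -- receiver;
// r5 -- argc; r6 -- argv (handles pointing at the actual arguments).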
static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm,
                                             bool is_construct) {
  // Enter an internal frame.
  {
    FrameScope scope(masm, StackFrame::INTERNAL);

    // Set up the context from the isolate (the caller context).
    ExternalReference context_address = ExternalReference::Create(
        IsolateAddressId::kContextAddress, masm->isolate());
    __ Move(cp, context_address);
    __ LoadP(cp, MemOperand(cp));

    // Push the function and the receiver onto the stack.
    __ Push(r3, r4);

    // Check if we have enough stack space to push all arguments; clobbers r4.
    Generate_CheckStackOverflow(masm, r5);

    // Copy arguments to the stack in a loop from argv to sp.
    Label argLoop, argExit;
    intptr_t zero = 0;
    __ ShiftLeftP(r7, r5, Operand(kPointerSizeLog2));
    __ SubP(sp, sp, r7);             // Buy the stack frame to fit the args.
    __ LoadImmP(r9, Operand(zero));  // Initialize argv index.
    __ bind(&argLoop);
    __ CmpPH(r7, Operand(zero));
    __ beq(&argExit, Label::kNear);
    __ lay(r7, MemOperand(r7, -kPointerSize));
    __ LoadP(r8, MemOperand(r9, r6));         // Read next parameter.
    __ la(r9, MemOperand(r9, kPointerSize));  // r9++
    __ LoadP(r0, MemOperand(r8));             // Dereference handle.
    __ StoreP(r0, MemOperand(r7, sp));        // Copy parameter to the stack.
    __ b(&argLoop);
    __ bind(&argExit);

    // Set up new.target and argc.
    __ LoadRR(r6, r2);
    __ LoadRR(r2, r5);
    __ LoadRR(r5, r6);

    // Initialize all JavaScript callee-saved registers, since they will be
    // seen by the garbage collector as part of handlers.
    __ LoadRoot(r6, RootIndex::kUndefinedValue);
    __ LoadRR(r7, r6);
    __ LoadRR(r8, r6);
    __ LoadRR(r9, r6);

    // Invoke the code.
    Handle<Code> builtin = is_construct
                               ? BUILTIN_CODE(masm->isolate(), Construct)
                               : masm->isolate()->builtins()->Call();
    __ Call(builtin, RelocInfo::CODE_TARGET);

    // Exit the JS frame and remove the parameters (except function).
  }
  __ b(r14);  // r2 holds the result.
}
void Builtins::Generate_JSEntryTrampoline(MacroAssembler* masm) {
  Generate_JSEntryTrampolineHelper(masm, false);
}

void Builtins::Generate_JSConstructEntryTrampoline(MacroAssembler* masm) {
  Generate_JSEntryTrampolineHelper(masm, true);
}
static void ReplaceClosureCodeWithOptimizedCode(
    MacroAssembler* masm, Register optimized_code, Register closure,
    Register scratch1, Register scratch2, Register scratch3) {
  // Store code entry in the closure.
  __ StoreP(optimized_code, FieldMemOperand(closure, JSFunction::kCodeOffset),
            r0);
  __ LoadRR(scratch1, optimized_code);  // Write barrier clobbers scratch1.
  __ RecordWriteField(closure, JSFunction::kCodeOffset, scratch1, scratch2,
                      kLRHasNotBeenSaved, kDontSaveFPRegs, OMIT_REMEMBERED_SET,
                      OMIT_SMI_CHECK);
}
static void LeaveInterpreterFrame(MacroAssembler* masm, Register scratch) {
  Register args_count = scratch;

  // Get the arguments + receiver count.
  __ LoadP(args_count,
           MemOperand(fp, InterpreterFrameConstants::kBytecodeArrayFromFp));
  __ LoadlW(args_count,
            FieldMemOperand(args_count, BytecodeArray::kParameterSizeOffset));

  // Leave the frame (also dropping the register file).
  __ LeaveFrame(StackFrame::INTERPRETED);

  __ AddP(sp, sp, args_count);
}
// Tail-calls |function_id| if the Smi in |smi_entry| equals |marker|.
static void TailCallRuntimeIfMarkerEquals(MacroAssembler* masm,
                                          Register smi_entry,
                                          OptimizationMarker marker,
                                          Runtime::FunctionId function_id) {
  Label no_match;
  __ CmpSmiLiteral(smi_entry, Smi::FromEnum(marker), r0);
  __ bne(&no_match);
  GenerateTailCallToReturnedCode(masm, function_id);
  __ bind(&no_match);
}
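// Inspects the optimized code slot of |feedback_vector|: a Smi marker is
// dispatched to the matching runtime function, a weak code reference is
// installed on the closure and tail-called, and an empty slot falls through
// to continue execution in the interpreter.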
static void MaybeTailCallOptimizedCodeSlot(MacroAssembler* masm,
                                           Register feedback_vector,
                                           Register scratch1, Register scratch2,
                                           Register scratch3) {
  // r2 (argc), r3 (target) and r5 (new target) are preserved for the callee.
  DCHECK(
      !AreAliased(feedback_vector, r2, r3, r5, scratch1, scratch2, scratch3));

  Label optimized_code_slot_is_weak_ref, fallthrough;

  Register closure = r3;
  Register optimized_code_entry = scratch1;

  __ LoadP(
      optimized_code_entry,
      FieldMemOperand(feedback_vector, FeedbackVector::kOptimizedCodeOffset));

  // Check if the code entry is a Smi. If yes, we interpret it as an
  // optimization marker. Otherwise, interpret it as a weak reference to a
  // code object.
  __ JumpIfNotSmi(optimized_code_entry, &optimized_code_slot_is_weak_ref);

  {
    // Optimized code slot is a Smi optimization marker.

    // Fall through if no optimization trigger.
    __ CmpSmiLiteral(optimized_code_entry,
                     Smi::FromEnum(OptimizationMarker::kNone), r0);
    __ beq(&fallthrough);

    TailCallRuntimeIfMarkerEquals(masm, optimized_code_entry,
                                  OptimizationMarker::kLogFirstExecution,
                                  Runtime::kFunctionFirstExecution);
    TailCallRuntimeIfMarkerEquals(masm, optimized_code_entry,
                                  OptimizationMarker::kCompileOptimized,
                                  Runtime::kCompileOptimized_NotConcurrent);
    TailCallRuntimeIfMarkerEquals(
        masm, optimized_code_entry,
        OptimizationMarker::kCompileOptimizedConcurrent,
        Runtime::kCompileOptimized_Concurrent);

    {
      // Otherwise, the marker is InOptimizationQueue, so fall through hoping
      // that an interrupt will eventually update the slot with optimized
      // code.
      if (FLAG_debug_code) {
        __ CmpSmiLiteral(
            optimized_code_entry,
            Smi::FromEnum(OptimizationMarker::kInOptimizationQueue), r0);
        __ Assert(eq, AbortReason::kExpectedOptimizationSentinel);
      }
      __ b(&fallthrough, Label::kNear);
    }
  }

  {
    // Optimized code slot is a weak reference.
    __ bind(&optimized_code_slot_is_weak_ref);

    __ LoadWeakValue(optimized_code_entry, optimized_code_entry, &fallthrough);

    // Check if the optimized code is marked for deopt. If it is, call the
    // runtime to clear it.
    Label found_deoptimized_code;
    __ LoadP(scratch2, FieldMemOperand(optimized_code_entry,
                                       Code::kCodeDataContainerOffset));
    __ LoadW(
        scratch2,
        FieldMemOperand(scratch2, CodeDataContainer::kKindSpecificFlagsOffset));
    __ TestBit(scratch2, Code::kMarkedForDeoptimizationBit, r0);
    __ bne(&found_deoptimized_code);

    // Optimized code is good, get it into the closure and tail-call it.
    ReplaceClosureCodeWithOptimizedCode(masm, optimized_code_entry, closure,
                                        scratch2, scratch3, feedback_vector);
    static_assert(kJavaScriptCallCodeStartRegister == r4, "ABI mismatch");
    __ AddP(r4, optimized_code_entry,
            Operand(Code::kHeaderSize - kHeapObjectTag));
    __ Jump(r4);

    // Optimized code slot contains deoptimized code, evict it and re-enter
    // the closure's code.
    __ bind(&found_deoptimized_code);
    GenerateTailCallToReturnedCode(masm, Runtime::kEvictOptimizedCodeSlot);
  }

  // Fall-through if the optimized code cell is clear and there is no
  // optimization marker.
  __ bind(&fallthrough);
}
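// Advance the current bytecode offset. This simulates what all bytecode
// handlers do upon completion of the underlying operation. Will bail out to
// a given label if the bytecode (without prefix) is a return bytecode.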
static void AdvanceBytecodeOffsetOrReturn(MacroAssembler* masm,
                                          Register bytecode_array,
                                          Register bytecode_offset,
                                          Register bytecode, Register scratch1,
                                          Label* if_return) {
  Register bytecode_size_table = scratch1;
  Register scratch2 = bytecode;
  DCHECK(!AreAliased(bytecode_array, bytecode_offset, bytecode_size_table,
                     bytecode));
  __ Move(bytecode_size_table,
          ExternalReference::bytecode_size_table_address());

  // Check if the bytecode is a Wide or ExtraWide prefix bytecode.
  Label process_bytecode, extra_wide;
  STATIC_ASSERT(0 == static_cast<int>(interpreter::Bytecode::kWide));
  STATIC_ASSERT(1 == static_cast<int>(interpreter::Bytecode::kExtraWide));
  STATIC_ASSERT(2 == static_cast<int>(interpreter::Bytecode::kDebugBreakWide));
  STATIC_ASSERT(3 ==
                static_cast<int>(interpreter::Bytecode::kDebugBreakExtraWide));
  __ CmpP(bytecode, Operand(0x3));
  __ bgt(&process_bytecode);
  __ tmll(bytecode, Operand(0x1));
  __ bne(&extra_wide);

  // Load the next bytecode and update table to the wide scaled table.
  __ AddP(bytecode_offset, bytecode_offset, Operand(1));
  __ LoadlB(bytecode, MemOperand(bytecode_array, bytecode_offset));
  __ AddP(bytecode_size_table, bytecode_size_table,
          Operand(kIntSize * interpreter::Bytecodes::kBytecodeCount));
  __ b(&process_bytecode);

  __ bind(&extra_wide);
  // Load the next bytecode and update table to the extra wide scaled table.
  __ AddP(bytecode_offset, bytecode_offset, Operand(1));
  __ LoadlB(bytecode, MemOperand(bytecode_array, bytecode_offset));
  __ AddP(bytecode_size_table, bytecode_size_table,
          Operand(2 * kIntSize * interpreter::Bytecodes::kBytecodeCount));

  __ bind(&process_bytecode);

  // Bail out to the return label if this is a return bytecode.
#define JUMP_IF_EQUAL(NAME)                                           \
  __ CmpP(bytecode,                                                   \
          Operand(static_cast<int>(interpreter::Bytecode::k##NAME))); \
  __ beq(if_return);
  RETURN_BYTECODE_LIST(JUMP_IF_EQUAL)
#undef JUMP_IF_EQUAL

  // Otherwise, load the size of the current bytecode and advance the offset.
  __ ShiftLeftP(scratch2, bytecode, Operand(2));
  __ LoadlW(scratch2, MemOperand(bytecode_size_table, scratch2));
  __ AddP(bytecode_offset, bytecode_offset, scratch2);
}
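// Generate code for entering a JS function with the interpreter.
// On entry the receiver and arguments have been pushed on the stack, r3
// holds the JSFunction being called, r5 the incoming new target or generator
// object and cp our context. The function builds an interpreted frame; see
// JavaScriptFrameConstants in frames.h for its layout.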
void Builtins::Generate_InterpreterEntryTrampoline(MacroAssembler* masm) {
  Register closure = r3;
  Register feedback_vector = r4;

  // Load the feedback vector from the closure.
  __ LoadP(feedback_vector,
           FieldMemOperand(closure, JSFunction::kFeedbackCellOffset));
  __ LoadP(feedback_vector,
           FieldMemOperand(feedback_vector, Cell::kValueOffset));
  // Read off the optimized code slot in the feedback vector, and if there
  // is optimized code or an optimization marker, call that instead.
  MaybeTailCallOptimizedCodeSlot(masm, feedback_vector, r6, r8, r7);

  // Open a frame scope to indicate that there is a frame on the stack. The
  // MANUAL indicates that the scope shouldn't actually generate code to set
  // up the frame (that is done below).
  FrameScope frame_scope(masm, StackFrame::MANUAL);
  __ PushStandardFrame(closure);

  // Get the bytecode array from the function object (or from the
  // InterpreterData as the debug copy).
  __ LoadP(r2, FieldMemOperand(closure, JSFunction::kSharedFunctionInfoOffset));
  __ LoadP(kInterpreterBytecodeArrayRegister,
           FieldMemOperand(r2, SharedFunctionInfo::kFunctionDataOffset));
  GetSharedFunctionInfoBytecode(masm, kInterpreterBytecodeArrayRegister, r6);

  // Increment invocation count for the function.
  __ LoadW(r1, FieldMemOperand(feedback_vector,
                               FeedbackVector::kInvocationCountOffset));
  __ AddP(r1, r1, Operand(1));
  __ StoreW(r1, FieldMemOperand(feedback_vector,
                                FeedbackVector::kInvocationCountOffset));

  // Check function data field is actually a BytecodeArray object.
  if (FLAG_debug_code) {
    __ TestIfSmi(kInterpreterBytecodeArrayRegister);
    __ Assert(
        ne, AbortReason::kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry);
    __ CompareObjectType(kInterpreterBytecodeArrayRegister, r2, no_reg,
                         BYTECODE_ARRAY_TYPE);
    __ Assert(
        eq, AbortReason::kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry);
  }

  // Reset code age.
  __ mov(r1, Operand(BytecodeArray::kNoAgeBytecodeAge));
  __ StoreByte(r1, FieldMemOperand(kInterpreterBytecodeArrayRegister,
                                   BytecodeArray::kBytecodeAgeOffset),
               r0);

  // Load the initial bytecode offset.
  __ mov(kInterpreterBytecodeOffsetRegister,
         Operand(BytecodeArray::kHeaderSize - kHeapObjectTag));

  // Push bytecode array and Smi tagged bytecode array offset.
  __ SmiTag(r4, kInterpreterBytecodeOffsetRegister);
  __ Push(kInterpreterBytecodeArrayRegister, r4);

  // Allocate the local and temporary register file on the stack.
  {
    // Load frame size from the BytecodeArray object.
    __ LoadlW(r4, FieldMemOperand(kInterpreterBytecodeArrayRegister,
                                  BytecodeArray::kFrameSizeOffset));

    // Do a stack check to ensure we don't go over the limit.
    Label ok;
    __ SubP(r8, sp, r4);
    __ LoadRoot(r0, RootIndex::kRealStackLimit);
    __ CmpLogicalP(r8, r0);
    __ bge(&ok);
    __ CallRuntime(Runtime::kThrowStackOverflow);
    __ bind(&ok);

    // If ok, push undefined as the initial value for all register file
    // entries.
    Label loop, no_args;
    __ LoadRoot(r8, RootIndex::kUndefinedValue);
    __ ShiftRightP(r4, r4, Operand(kPointerSizeLog2));
    __ LoadAndTestP(r4, r4);
    __ beq(&no_args);
    __ LoadRR(r1, r4);
    __ bind(&loop);
    __ push(r8);
    __ SubP(r1, Operand(1));
    __ bne(&loop);
    __ bind(&no_args);
  }

  // If the bytecode array has a valid incoming new target or generator object
  // register, initialize it with the incoming value in r5.
  Label no_incoming_new_target_or_generator_register;
  __ LoadW(r8, FieldMemOperand(
                   kInterpreterBytecodeArrayRegister,
                   BytecodeArray::kIncomingNewTargetOrGeneratorRegisterOffset));
  __ CmpP(r8, Operand::Zero());
  __ beq(&no_incoming_new_target_or_generator_register);
  __ ShiftLeftP(r8, r8, Operand(kPointerSizeLog2));
  __ StoreP(r5, MemOperand(fp, r8));
  __ bind(&no_incoming_new_target_or_generator_register);

  // Load accumulator with undefined.
  __ LoadRoot(kInterpreterAccumulatorRegister, RootIndex::kUndefinedValue);
  // Load the dispatch table into a register and dispatch to the bytecode
  // handler at the current bytecode offset.
  Label do_dispatch;
  __ bind(&do_dispatch);
  __ Move(
      kInterpreterDispatchTableRegister,
      ExternalReference::interpreter_dispatch_table_address(masm->isolate()));

  __ LoadlB(r5, MemOperand(kInterpreterBytecodeArrayRegister,
                           kInterpreterBytecodeOffsetRegister));
  __ ShiftLeftP(r5, r5, Operand(kPointerSizeLog2));
  __ LoadP(kJavaScriptCallCodeStartRegister,
           MemOperand(kInterpreterDispatchTableRegister, r5));
  __ Call(kJavaScriptCallCodeStartRegister);

  masm->isolate()->heap()->SetInterpreterEntryReturnPCOffset(masm->pc_offset());

  // Any returns to the entry trampoline are either due to the return bytecode
  // or the interpreter tail-calling a builtin and then a dispatch.

  // Get bytecode array and bytecode offset from the stack frame.
  __ LoadP(kInterpreterBytecodeArrayRegister,
           MemOperand(fp, InterpreterFrameConstants::kBytecodeArrayFromFp));
  __ LoadP(kInterpreterBytecodeOffsetRegister,
           MemOperand(fp, InterpreterFrameConstants::kBytecodeOffsetFromFp));
  __ SmiUntag(kInterpreterBytecodeOffsetRegister);

  // Either return, or advance to the next bytecode and dispatch.
  Label do_return;
  __ LoadlB(r3, MemOperand(kInterpreterBytecodeArrayRegister,
                           kInterpreterBytecodeOffsetRegister));
  AdvanceBytecodeOffsetOrReturn(masm, kInterpreterBytecodeArrayRegister,
                                kInterpreterBytecodeOffsetRegister, r3, r4,
                                &do_return);
  __ b(&do_dispatch);

  __ bind(&do_return);
  // The return value is in r2.
  LeaveInterpreterFrame(masm, r4);
  __ Ret();
}
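// Pushes |count| arguments onto the stack, reading them downwards from
// |index| and clobbering r0 and |scratch|.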
static void Generate_InterpreterPushArgs(MacroAssembler* masm,
                                         Register num_args, Register index,
                                         Register count, Register scratch) {
  Label loop, skip;
  __ CmpP(count, Operand::Zero());
  __ beq(&skip);
  __ AddP(index, index, Operand(kPointerSize));  // Bias up for LoadPU.
  __ LoadRR(r0, count);
  __ bind(&loop);
  __ LoadP(scratch, MemOperand(index, -kPointerSize));
  __ lay(index, MemOperand(index, -kPointerSize));
  __ push(scratch);
  __ SubP(r0, Operand(1));
  __ bne(&loop);
  __ bind(&skip);
}
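// State on entry: r2 -- argc (not including the receiver); r4 -- the address
// of the first argument to be pushed; r3 -- the target to call.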
void Builtins::Generate_InterpreterPushArgsThenCallImpl(
    MacroAssembler* masm, ConvertReceiverMode receiver_mode,
    InterpreterPushArgsMode mode) {
  DCHECK(mode != InterpreterPushArgsMode::kArrayFunction);
  Label stack_overflow;

  // Calculate number of arguments (add one for receiver).
  __ AddP(r5, r2, Operand(1));
  Generate_StackOverflowCheck(masm, r5, ip, &stack_overflow);

  // Push "undefined" as the receiver arg if we need to.
  if (receiver_mode == ConvertReceiverMode::kNullOrUndefined) {
    __ PushRoot(RootIndex::kUndefinedValue);
    __ LoadRR(r5, r2);  // Argument count is correct.
  }

  // Push the arguments.
  Generate_InterpreterPushArgs(masm, r5, r4, r5, r6);
  if (mode == InterpreterPushArgsMode::kWithFinalSpread) {
    __ Pop(r4);                   // Pass the spread in a register.
    __ SubP(r2, r2, Operand(1));  // Subtract one for spread.
  }

  // Call the target.
  if (mode == InterpreterPushArgsMode::kWithFinalSpread) {
    __ Jump(BUILTIN_CODE(masm->isolate(), CallWithSpread),
            RelocInfo::CODE_TARGET);
  } else {
    __ Jump(masm->isolate()->builtins()->Call(ConvertReceiverMode::kAny),
            RelocInfo::CODE_TARGET);
  }

  __ bind(&stack_overflow);
  {
    __ TailCallRuntime(Runtime::kThrowStackOverflow);
    // Unreachable code.
    __ bkpt(0);
  }
}
void Builtins::Generate_InterpreterPushArgsThenConstructImpl(
    MacroAssembler* masm, InterpreterPushArgsMode mode) {
  // State on entry: r2 -- argc; r5 -- new target; r3 -- constructor;
  // r4 -- allocation site feedback (or undefined); r6 -- address of the
  // first argument.
  Label stack_overflow;

  // Push a slot for the receiver to be constructed.
  __ LoadImmP(r0, Operand::Zero());
  __ push(r0);

  // Skip the argument pushing if there are no arguments.
  Label skip;
  __ CmpP(r2, Operand::Zero());
  __ beq(&skip);
  Generate_StackOverflowCheck(masm, r2, ip, &stack_overflow);
  Generate_InterpreterPushArgs(masm, r2, r6, r2, r7);
  __ bind(&skip);

  if (mode == InterpreterPushArgsMode::kWithFinalSpread) {
    __ Pop(r4);                   // Pass the spread in a register.
    __ SubP(r2, r2, Operand(1));  // Subtract one for spread.
  } else {
    __ AssertUndefinedOrAllocationSite(r4, r7);
  }

  if (mode == InterpreterPushArgsMode::kArrayFunction) {
    __ AssertFunction(r3);

    // Tail call to the array construct stub (still in the caller context at
    // this point).
    Handle<Code> code = BUILTIN_CODE(masm->isolate(), ArrayConstructorImpl);
    __ Jump(code, RelocInfo::CODE_TARGET);
  } else if (mode == InterpreterPushArgsMode::kWithFinalSpread) {
    // Call the constructor with r2, r3, and r5 unmodified.
    __ Jump(BUILTIN_CODE(masm->isolate(), ConstructWithSpread),
            RelocInfo::CODE_TARGET);
  } else {
    DCHECK_EQ(InterpreterPushArgsMode::kOther, mode);
    // Call the constructor with r2, r3, and r5 unmodified.
    __ Jump(BUILTIN_CODE(masm->isolate(), Construct), RelocInfo::CODE_TARGET);
  }

  __ bind(&stack_overflow);
  {
    __ TailCallRuntime(Runtime::kThrowStackOverflow);
    // Unreachable code.
    __ bkpt(0);
  }
}
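// Re-enters the interpreter at the bytecode offset stored in the current
// interpreter frame, using either the function's own trampoline copy (for
// profiling) or the global interpreter entry trampoline.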
static void Generate_InterpreterEnterBytecode(MacroAssembler* masm) {
  // Set the return address to the correct point in the interpreter entry
  // trampoline.
  Label builtin_trampoline, trampoline_loaded;
  Smi interpreter_entry_return_pc_offset(
      masm->isolate()->heap()->interpreter_entry_return_pc_offset());
  DCHECK_NE(interpreter_entry_return_pc_offset, Smi::zero());

  // If the SFI function_data is an InterpreterData, the function will have a
  // custom copy of the interpreter entry trampoline for profiling. If so,
  // get the custom trampoline, otherwise grab the entry address of the
  // global trampoline.
  __ LoadP(r4, MemOperand(fp, StandardFrameConstants::kFunctionOffset));
  __ LoadP(r4, FieldMemOperand(r4, JSFunction::kSharedFunctionInfoOffset));
  __ LoadP(r4, FieldMemOperand(r4, SharedFunctionInfo::kFunctionDataOffset));
  __ CompareObjectType(r4, kInterpreterDispatchTableRegister,
                       kInterpreterDispatchTableRegister,
                       INTERPRETER_DATA_TYPE);
  __ bne(&builtin_trampoline);

  __ LoadP(r4,
           FieldMemOperand(r4, InterpreterData::kInterpreterTrampolineOffset));
  __ AddP(r4, r4, Operand(Code::kHeaderSize - kHeapObjectTag));
  __ b(&trampoline_loaded);

  __ bind(&builtin_trampoline);
  __ Move(r4, ExternalReference::
                  address_of_interpreter_entry_trampoline_instruction_start(
                      masm->isolate()));
  __ LoadP(r4, MemOperand(r4));

  __ bind(&trampoline_loaded);
  __ AddP(r14, r4, Operand(interpreter_entry_return_pc_offset->value()));

  // Initialize the dispatch table register.
  __ Move(
      kInterpreterDispatchTableRegister,
      ExternalReference::interpreter_dispatch_table_address(masm->isolate()));

  // Get the bytecode array pointer from the frame.
  __ LoadP(kInterpreterBytecodeArrayRegister,
           MemOperand(fp, InterpreterFrameConstants::kBytecodeArrayFromFp));

  if (FLAG_debug_code) {
    // Check function data field is actually a BytecodeArray object.
    __ TestIfSmi(kInterpreterBytecodeArrayRegister);
    __ Assert(
        ne, AbortReason::kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry);
    __ CompareObjectType(kInterpreterBytecodeArrayRegister, r3, no_reg,
                         BYTECODE_ARRAY_TYPE);
    __ Assert(
        eq, AbortReason::kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry);
  }

  // Get the target bytecode offset from the frame.
  __ LoadP(kInterpreterBytecodeOffsetRegister,
           MemOperand(fp, InterpreterFrameConstants::kBytecodeOffsetFromFp));
  __ SmiUntag(kInterpreterBytecodeOffsetRegister);

  // Dispatch to the target bytecode.
  __ LoadlB(ip, MemOperand(kInterpreterBytecodeArrayRegister,
                           kInterpreterBytecodeOffsetRegister));
  __ ShiftLeftP(ip, ip, Operand(kPointerSizeLog2));
  __ LoadP(kJavaScriptCallCodeStartRegister,
           MemOperand(kInterpreterDispatchTableRegister, ip));
  __ Jump(kJavaScriptCallCodeStartRegister);
}
void Builtins::Generate_InterpreterEnterBytecodeAdvance(MacroAssembler* masm) {
  // Get bytecode array and bytecode offset from the stack frame.
  __ LoadP(kInterpreterBytecodeArrayRegister,
           MemOperand(fp, InterpreterFrameConstants::kBytecodeArrayFromFp));
  __ LoadP(kInterpreterBytecodeOffsetRegister,
           MemOperand(fp, InterpreterFrameConstants::kBytecodeOffsetFromFp));
  __ SmiUntag(kInterpreterBytecodeOffsetRegister);

  // Load the current bytecode.
  __ LoadlB(r3, MemOperand(kInterpreterBytecodeArrayRegister,
                           kInterpreterBytecodeOffsetRegister));

  // Advance to the next bytecode.
  Label if_return;
  AdvanceBytecodeOffsetOrReturn(masm, kInterpreterBytecodeArrayRegister,
                                kInterpreterBytecodeOffsetRegister, r3, r4,
                                &if_return);

  // Convert the new bytecode offset to a Smi and save it in the stack frame.
  __ SmiTag(r4, kInterpreterBytecodeOffsetRegister);
  __ StoreP(r4,
            MemOperand(fp, InterpreterFrameConstants::kBytecodeOffsetFromFp));

  Generate_InterpreterEnterBytecode(masm);

  // We should never take the if_return path.
  __ bind(&if_return);
  __ Abort(AbortReason::kInvalidBytecodeAdvance);
}

void Builtins::Generate_InterpreterEnterBytecodeDispatch(MacroAssembler* masm) {
  Generate_InterpreterEnterBytecode(masm);
}
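// Attempts to instantiate the incoming function as an asm.js module via
// Runtime::kInstantiateAsmJs; on failure falls back to calling the function
// as ordinary JavaScript.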
void Builtins::Generate_InstantiateAsmJs(MacroAssembler* masm) {
  // r2 (argc), r3 (new target) and r5 (target function) are preserved for
  // the callee on the failure path.
  Label failed;
  {
    FrameScope scope(masm, StackFrame::INTERNAL);

    // Preserve argument count for later compare.
    __ Move(r6, r2);
    // Push a copy of the target function and the new target; push the
    // function again as a parameter to the runtime call.
    __ SmiTag(r2);
    __ Push(r2, r3, r5, r3);

    // Copy arguments from caller (stdlib, foreign, heap).
    Label args_done;
    for (int j = 0; j < 4; ++j) {
      Label over;
      if (j < 3) {
        __ CmpP(r6, Operand(j));
        __ b(ne, &over);
      }
      for (int i = j - 1; i >= 0; --i) {
        __ LoadP(r6, MemOperand(fp, StandardFrameConstants::kCallerSPOffset +
                                        i * kPointerSize));
        __ push(r6);
      }
      for (int i = 0; i < 3 - j; ++i) {
        __ PushRoot(RootIndex::kUndefinedValue);
      }
      if (j < 3) {
        __ jmp(&args_done);
        __ bind(&over);
      }
    }
    __ bind(&args_done);

    // Push undefined for the receiver and call the runtime function.
    __ PushRoot(RootIndex::kUndefinedValue);
    __ CallRuntime(Runtime::kInstantiateAsmJs, 4);
    // A smi 0 is returned on failure, an object on success.
    __ JumpIfSmi(r2, &failed);

    __ Drop(2);
    __ pop(r6);
    __ SmiUntag(r6);
    scope.GenerateLeaveFrame();

    __ AddP(r6, r6, Operand(1));
    __ Drop(r6);
    __ Ret();

    __ bind(&failed);
    // Restore target function and new target.
    __ Pop(r2, r3, r5);
    __ SmiUntag(r2);
  }
  // On failure, tail call back to regular JS by re-calling the function
  // which has been reset to the compile-lazy builtin.
  static_assert(kJavaScriptCallCodeStartRegister == r4, "ABI mismatch");
  __ LoadP(r4, FieldMemOperand(r3, JSFunction::kCodeOffset));
  __ AddP(r4, r4, Operand(Code::kHeaderSize - kHeapObjectTag));
  __ JumpToJSEntry(r4);
}
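// Restores the registers saved in a builtin continuation frame (optionally
// placing the result in r2) and resumes the continuation builtin.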
void Generate_ContinueToBuiltinHelper(MacroAssembler* masm,
                                      bool java_script_builtin,
                                      bool with_result) {
  const RegisterConfiguration* config(RegisterConfiguration::Default());
  int allocatable_register_count = config->num_allocatable_general_registers();
  if (with_result) {
    // Overwrite the hole inserted by the deoptimizer with the return value
    // from the LAZY deopt point.
    __ StoreP(
        r2,
        MemOperand(
            sp, config->num_allocatable_general_registers() * kPointerSize +
                    BuiltinContinuationFrameConstants::kFixedFrameSize));
  }
  for (int i = allocatable_register_count - 1; i >= 0; --i) {
    int code = config->GetAllocatableGeneralCode(i);
    __ Pop(Register::from_code(code));
    if (java_script_builtin && code == kJavaScriptCallArgCountRegister.code()) {
      __ SmiUntag(Register::from_code(code));
    }
  }
  __ LoadP(
      fp,
      MemOperand(sp, BuiltinContinuationFrameConstants::kFixedFrameSizeFromFp));
  __ Pop(ip);
  __ AddP(sp, sp,
          Operand(BuiltinContinuationFrameConstants::kFixedFrameSizeFromFp));
  __ Pop(r0);
  __ LoadRR(r14, r0);
  __ AddP(ip, ip, Operand(Code::kHeaderSize - kHeapObjectTag));
  __ Jump(ip);
}
void Builtins::Generate_ContinueToCodeStubBuiltin(MacroAssembler* masm) {
  Generate_ContinueToBuiltinHelper(masm, false, false);
}

void Builtins::Generate_ContinueToCodeStubBuiltinWithResult(
    MacroAssembler* masm) {
  Generate_ContinueToBuiltinHelper(masm, false, true);
}

void Builtins::Generate_ContinueToJavaScriptBuiltin(MacroAssembler* masm) {
  Generate_ContinueToBuiltinHelper(masm, true, false);
}

void Builtins::Generate_ContinueToJavaScriptBuiltinWithResult(
    MacroAssembler* masm) {
  Generate_ContinueToBuiltinHelper(masm, true, true);
}
void Builtins::Generate_NotifyDeoptimized(MacroAssembler* masm) {
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ CallRuntime(Runtime::kNotifyDeoptimized);
  }

  DCHECK_EQ(kInterpreterAccumulatorRegister.code(), r2.code());
  __ pop(r2);
  __ Ret();
}
void Builtins::Generate_InterpreterOnStackReplacement(MacroAssembler* masm) {
  // Lookup the function in the JavaScript frame.
  __ LoadP(r2, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
  __ LoadP(r2, MemOperand(r2, JavaScriptFrameConstants::kFunctionOffset));

  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    // Pass function as argument.
    __ push(r2);
    __ CallRuntime(Runtime::kCompileForOnStackReplacement);
  }

  // If the code object is null, just return to the caller.
  Label skip;
  __ CmpSmiLiteral(r2, Smi::zero(), r0);
  __ bne(&skip);
  __ Ret();

  __ bind(&skip);

  // Drop the handler frame that is sitting on top of the actual JavaScript
  // frame. This is the case when OSR is triggered from bytecode.
  __ LeaveFrame(StackFrame::STUB);

  // Load deoptimization data from the code object:
  // <deopt_data> = <code>[#deoptimization_data_offset]
  __ LoadP(r3, FieldMemOperand(r2, Code::kDeoptimizationDataOffset));

  // Load the OSR entrypoint offset from the deoptimization data:
  // <osr_offset> = <deopt_data>[#header_size + #osr_pc_offset]
  __ LoadP(r3, FieldMemOperand(r3, FixedArray::OffsetOfElementAt(
                                       DeoptimizationData::kOsrPcOffsetIndex)));
  __ SmiUntag(r3);

  // Compute the target address: code_obj + header_size + osr_offset.
  __ AddP(r2, r3);
  __ AddP(r0, r2, Operand(Code::kHeaderSize - kHeapObjectTag));
  __ LoadRR(r14, r0);

  // And "return" to the OSR entry point of the function.
  __ Ret();
}
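// ES6 section 19.2.3.1 Function.prototype.apply.
// State on entry: r2 -- argc; sp[0] -- argArray; sp[kPointerSize] -- thisArg;
// sp[2 * kPointerSize] -- receiver (the function to apply).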
void Builtins::Generate_FunctionPrototypeApply(MacroAssembler* masm) {
  // 1. Load receiver into r3, argArray into r4 (if present), remove all
  // arguments from the stack (including the receiver), and push thisArg (if
  // present) instead.
  {
    Label skip;
    Register arg_size = r7;
    Register new_sp = r5;
    Register scratch = r6;
    __ ShiftLeftP(arg_size, r2, Operand(kPointerSizeLog2));
    __ AddP(new_sp, sp, arg_size);
    __ LoadRoot(scratch, RootIndex::kUndefinedValue);
    __ LoadRR(r4, scratch);
    __ LoadP(r3, MemOperand(new_sp, 0));  // receiver
    __ CmpP(arg_size, Operand(kPointerSize));
    __ blt(&skip);
    __ LoadP(scratch, MemOperand(new_sp, 1 * -kPointerSize));  // thisArg
    __ beq(&skip);
    __ LoadP(r4, MemOperand(new_sp, 2 * -kPointerSize));  // argArray
    __ bind(&skip);
    __ LoadRR(sp, new_sp);
    __ StoreP(scratch, MemOperand(sp, 0));
  }

  // 2. We don't need to check explicitly for a callable receiver here,
  // since that's the first thing the Call/CallWithArrayLike builtins will do.

  // 3. Tail call with no arguments if argArray is null or undefined.
  Label no_arguments;
  __ JumpIfRoot(r4, RootIndex::kNullValue, &no_arguments);
  __ JumpIfRoot(r4, RootIndex::kUndefinedValue, &no_arguments);

  // 4. Apply the receiver to the given argArray.
  __ Jump(BUILTIN_CODE(masm->isolate(), CallWithArrayLike),
          RelocInfo::CODE_TARGET);

  // 4b. The argArray is either null or undefined, so we tail call without any
  // arguments to the receiver.
  __ bind(&no_arguments);
  {
    __ LoadImmP(r2, Operand::Zero());
    __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
  }
}
void Builtins::Generate_FunctionPrototypeCall(MacroAssembler* masm) {
  // 1. Make sure we have at least one argument (r2 holds the actual count).
  {
    Label done;
    __ CmpP(r2, Operand::Zero());
    __ bne(&done, Label::kNear);
    __ PushRoot(RootIndex::kUndefinedValue);
    __ AddP(r2, Operand(1));
    __ bind(&done);
  }

  // 2. Get the callable to call (passed as receiver) from the stack.
  __ ShiftLeftP(r4, r2, Operand(kPointerSizeLog2));
  __ LoadP(r3, MemOperand(sp, r4));

  // 3. Shift arguments and return address one slot down on the stack
  //    (overwriting the original receiver). Adjust argument count to make
  //    the original first argument the new receiver.
  {
    Label loop;
    // Calculate the copy start address (destination). Copy end address is sp.
    __ AddP(r4, sp, r4);

    __ bind(&loop);
    __ LoadP(ip, MemOperand(r4, -kPointerSize));
    __ StoreP(ip, MemOperand(r4));
    __ SubP(r4, Operand(kPointerSize));
    __ CmpP(r4, sp);
    __ bne(&loop);
    // Adjust the actual number of arguments and remove the top element
    // (which is a copy of the last argument).
    __ SubP(r2, Operand(1));
    __ pop();
  }

  // 4. Call the callable.
  __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
}
void Builtins::Generate_ReflectApply(MacroAssembler* masm) {
  // State on entry: r2 -- argc; sp[0] -- argumentsList; sp[kPointerSize] --
  // thisArgument; sp[2 * kPointerSize] -- target; sp[3 * kPointerSize] --
  // receiver.

  // 1. Load target into r3 (if present), argumentsList into r4 (if present),
  // remove all arguments from the stack (including the receiver), and push
  // thisArgument (if present) instead.
  {
    Label skip;
    Register arg_size = r7;
    Register new_sp = r5;
    Register scratch = r6;
    __ ShiftLeftP(arg_size, r2, Operand(kPointerSizeLog2));
    __ AddP(new_sp, sp, arg_size);
    __ LoadRoot(r3, RootIndex::kUndefinedValue);
    __ LoadRR(scratch, r3);
    __ LoadRR(r4, r3);
    __ CmpP(arg_size, Operand(kPointerSize));
    __ blt(&skip);
    __ LoadP(r3, MemOperand(new_sp, 1 * -kPointerSize));  // target
    __ beq(&skip);
    __ LoadP(scratch, MemOperand(new_sp, 2 * -kPointerSize));  // thisArgument
    __ CmpP(arg_size, Operand(2 * kPointerSize));
    __ beq(&skip);
    __ LoadP(r4, MemOperand(new_sp, 3 * -kPointerSize));  // argumentsList
    __ bind(&skip);
    __ LoadRR(sp, new_sp);
    __ StoreP(scratch, MemOperand(sp, 0));
  }

  // 2. We don't need to check explicitly for a callable target here,
  // since that's the first thing the Call/CallWithArrayLike builtins will do.

  // 3. Apply the target to the given argumentsList.
  __ Jump(BUILTIN_CODE(masm->isolate(), CallWithArrayLike),
          RelocInfo::CODE_TARGET);
}
void Builtins::Generate_ReflectConstruct(MacroAssembler* masm) {
  // State on entry: r2 -- argc; sp[0] -- new.target (optional);
  // sp[kPointerSize] -- argumentsList; sp[2 * kPointerSize] -- target;
  // sp[3 * kPointerSize] -- receiver.

  // 1. Load target into r3 (if present), argumentsList into r4 (if present),
  // new.target into r5 (if present, otherwise use target), remove all
  // arguments from the stack (including the receiver), and push undefined as
  // the receiver.
  {
    Label skip;
    Register arg_size = r7;
    Register new_sp = r6;
    __ ShiftLeftP(arg_size, r2, Operand(kPointerSizeLog2));
    __ AddP(new_sp, sp, arg_size);
    __ LoadRoot(r3, RootIndex::kUndefinedValue);
    __ LoadRR(r4, r3);
    __ LoadRR(r5, r3);
    __ StoreP(r3, MemOperand(new_sp, 0));  // receiver (undefined)
    __ CmpP(arg_size, Operand(kPointerSize));
    __ blt(&skip);
    __ LoadP(r3, MemOperand(new_sp, 1 * -kPointerSize));  // target
    __ LoadRR(r5, r3);  // new.target defaults to target
    __ beq(&skip);
    __ LoadP(r4, MemOperand(new_sp, 2 * -kPointerSize));  // argumentsList
    __ CmpP(arg_size, Operand(2 * kPointerSize));
    __ beq(&skip);
    __ LoadP(r5, MemOperand(new_sp, 3 * -kPointerSize));  // new.target
    __ bind(&skip);
    __ LoadRR(sp, new_sp);
  }

  // 2. The Construct/ConstructWithArrayLike builtins check that both the
  // target and new.target are constructors, so no explicit checks here.

  // 3. Construct the target with the given new.target and argumentsList.
  __ Jump(BUILTIN_CODE(masm->isolate(), ConstructWithArrayLike),
          RelocInfo::CODE_TARGET);
}
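// Adaptor frames are used when the actual argument count does not match the
// formal parameter count; the helpers below create and tear down that frame.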
static void EnterArgumentsAdaptorFrame(MacroAssembler* masm) {
  __ SmiTag(r2);
  __ Load(r6, Operand(StackFrame::TypeToMarker(StackFrame::ARGUMENTS_ADAPTOR)));
  // Stack layout (growing downwards): return address, old fp (the new fp),
  // adaptor frame marker, function, argc as smi, padding.
  __ lay(sp, MemOperand(sp, -5 * kPointerSize));

  // Cleanse the top nibble of 31-bit pointers.
  __ CleanseP(r14);
  __ StoreP(r14, MemOperand(sp, 4 * kPointerSize));
  __ StoreP(fp, MemOperand(sp, 3 * kPointerSize));
  __ StoreP(r6, MemOperand(sp, 2 * kPointerSize));
  __ StoreP(r3, MemOperand(sp, 1 * kPointerSize));
  __ StoreP(r2, MemOperand(sp, 0 * kPointerSize));
  __ Push(Smi::zero());
  __ la(fp,
        MemOperand(sp, ArgumentsAdaptorFrameConstants::kFixedFrameSizeFromFp));
}

static void LeaveArgumentsAdaptorFrame(MacroAssembler* masm) {
  // r2 holds the result being passed through.
  // Get the number of arguments passed (as a smi), tear down the frame and
  // then tear down the parameters.
  __ LoadP(r3, MemOperand(fp, ArgumentsAdaptorFrameConstants::kLengthOffset));
  int stack_adjustment = kPointerSize;  // adjust for receiver
  __ LeaveFrame(StackFrame::ARGUMENTS_ADAPTOR, stack_adjustment);
  __ SmiToPtrArrayOffset(r3, r3);
  __ lay(sp, MemOperand(sp, r3));
}
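// Common code for Call/Construct when the arguments come from a FixedArray.
// State on entry: r3 -- target; r2 -- number of stack parameters; r4 -- the
// arguments FixedArray; r6 -- number of elements to push; r5 -- new.target
// (for [[Construct]]).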
void Builtins::Generate_CallOrConstructVarargs(MacroAssembler* masm,
                                               Handle<Code> code) {
  Register scratch = ip;

  if (masm->emit_debug_code()) {
    // Allow r4 to be a FixedArray, or a FixedDoubleArray if r6 == 0.
    Label ok, fail;
    __ AssertNotSmi(r4);
    __ LoadP(scratch, FieldMemOperand(r4, HeapObject::kMapOffset));
    __ LoadHalfWordP(scratch,
                     FieldMemOperand(scratch, Map::kInstanceTypeOffset));
    __ CmpP(scratch, Operand(FIXED_ARRAY_TYPE));
    __ beq(&ok);
    __ CmpP(scratch, Operand(FIXED_DOUBLE_ARRAY_TYPE));
    __ bne(&fail);
    __ CmpP(r6, Operand::Zero());
    __ beq(&ok);
    // Fall through.
    __ bind(&fail);
    __ Abort(AbortReason::kOperandIsNotAFixedArray);

    __ bind(&ok);
  }

  // Check for stack overflow.
  Label stack_overflow;
  Generate_StackOverflowCheck(masm, r6, ip, &stack_overflow);

  // Push arguments onto the stack (thisArgument is already on the stack).
  {
    Label loop, no_args, skip;
    __ CmpP(r6, Operand::Zero());
    __ beq(&no_args);
    __ AddP(r4, r4,
            Operand(FixedArray::kHeaderSize - kHeapObjectTag - kPointerSize));
    __ LoadRR(r1, r6);
    __ bind(&loop);
    __ LoadP(ip, MemOperand(r4, kPointerSize));
    __ la(r4, MemOperand(r4, kPointerSize));
    __ CompareRoot(ip, RootIndex::kTheHoleValue);
    __ bne(&skip, Label::kNear);
    __ LoadRoot(ip, RootIndex::kUndefinedValue);
    __ bind(&skip);
    __ push(ip);
    __ BranchOnCount(r1, &loop);
    __ bind(&no_args);
    __ AddP(r2, r2, r6);
  }

  // Tail-call to the actual Call or Construct builtin.
  __ Jump(code, RelocInfo::CODE_TARGET);

  __ bind(&stack_overflow);
  __ TailCallRuntime(Runtime::kThrowStackOverflow);
}
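// Forwards the caller's (possibly adapted) arguments, starting at index r4,
// to the target, then tail-calls |code|.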
void Builtins::Generate_CallOrConstructForwardVarargs(MacroAssembler* masm,
                                                      CallOrConstructMode mode,
                                                      Handle<Code> code) {
  Register scratch = r8;

  if (mode == CallOrConstructMode::kConstruct) {
    Label new_target_constructor, new_target_not_constructor;
    __ JumpIfSmi(r5, &new_target_not_constructor);
    __ LoadP(scratch, FieldMemOperand(r5, HeapObject::kMapOffset));
    __ LoadlB(scratch, FieldMemOperand(scratch, Map::kBitFieldOffset));
    __ tmll(scratch, Operand(Map::IsConstructorBit::kShift));
    __ bne(&new_target_constructor);
    __ bind(&new_target_not_constructor);
    {
      FrameScope scope(masm, StackFrame::MANUAL);
      __ EnterFrame(StackFrame::INTERNAL);
      __ Push(r5);
      __ CallRuntime(Runtime::kThrowNotConstructor);
    }
    __ bind(&new_target_constructor);
  }

  // Check if we have an arguments adaptor frame below the function frame.
  Label arguments_adaptor, arguments_done;
  __ LoadP(r6, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
  __ LoadP(ip, MemOperand(r6, CommonFrameConstants::kContextOrFrameTypeOffset));
  __ CmpP(ip, Operand(StackFrame::TypeToMarker(StackFrame::ARGUMENTS_ADAPTOR)));
  __ beq(&arguments_adaptor);
  {
    __ LoadP(r7, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
    __ LoadP(r7, FieldMemOperand(r7, JSFunction::kSharedFunctionInfoOffset));
    __ LoadLogicalHalfWordP(
        r7,
        FieldMemOperand(r7, SharedFunctionInfo::kFormalParameterCountOffset));
    __ LoadRR(r6, fp);
  }
  __ b(&arguments_done);
  __ bind(&arguments_adaptor);
  {
    // Load the length from the ArgumentsAdaptorFrame.
    __ LoadP(r7, MemOperand(r6, ArgumentsAdaptorFrameConstants::kLengthOffset));
    __ SmiUntag(r7);
  }
  __ bind(&arguments_done);

  Label stack_done, stack_overflow;
  __ SubP(r7, r7, r4);
  __ CmpP(r7, Operand::Zero());
  __ ble(&stack_done);
  {
    // Check for stack overflow.
    Generate_StackOverflowCheck(masm, r7, r4, &stack_overflow);

    // Forward the arguments from the caller frame.
    {
      Label loop;
      __ AddP(r6, r6, Operand(kPointerSize));
      __ AddP(r2, r2, r7);
      __ bind(&loop);
      __ ShiftLeftP(ip, r7, Operand(kPointerSizeLog2));
      __ LoadP(ip, MemOperand(r6, ip));
      __ push(ip);
      __ SubP(r7, r7, Operand(1));
      __ CmpP(r7, Operand::Zero());
      __ bne(&loop);
    }
  }
  __ b(&stack_done);
  __ bind(&stack_overflow);
  __ TailCallRuntime(Runtime::kThrowStackOverflow);
  __ bind(&stack_done);

  // Tail-call to the {code} handler.
  __ Jump(code, RelocInfo::CODE_TARGET);
}
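// ES6 section 9.2.1 [[Call]] for JSFunctions: converts the receiver for
// sloppy-mode targets and invokes the function with the expected argument
// count from its SharedFunctionInfo.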
void Builtins::Generate_CallFunction(MacroAssembler* masm,
                                     ConvertReceiverMode mode) {
  // r2 holds the argument count (not including the receiver); r3 the
  // function to call (checked to be a JSFunction).
  __ AssertFunction(r3);

  // See ES6 section 9.2.1 [[Call]] ( thisArgument, argumentsList ).
  // Check that the function is not a "classConstructor".
  Label class_constructor;
  __ LoadP(r4, FieldMemOperand(r3, JSFunction::kSharedFunctionInfoOffset));
  __ LoadlW(r5, FieldMemOperand(r4, SharedFunctionInfo::kFlagsOffset));
  __ TestBitMask(r5, SharedFunctionInfo::IsClassConstructorBit::kMask, r0);
  __ bne(&class_constructor);

  // Enter the context of the function; ToObject has to run in the function
  // context, and we also need to take the global proxy from the function
  // context in case of conversion.
  __ LoadP(cp, FieldMemOperand(r3, JSFunction::kContextOffset));
  // We need to convert the receiver for non-native sloppy mode functions.
  Label done_convert;
  __ AndP(r0, r5,
          Operand(SharedFunctionInfo::IsStrictBit::kMask |
                  SharedFunctionInfo::IsNativeBit::kMask));
  __ bne(&done_convert);
  {
    if (mode == ConvertReceiverMode::kNullOrUndefined) {
      // Patch receiver to global proxy.
      __ LoadGlobalProxy(r5);
    } else {
      Label convert_to_object, convert_receiver;
      __ ShiftLeftP(r5, r2, Operand(kPointerSizeLog2));
      __ LoadP(r5, MemOperand(sp, r5));
      __ JumpIfSmi(r5, &convert_to_object);
      STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
      __ CompareObjectType(r5, r6, r6, FIRST_JS_RECEIVER_TYPE);
      __ bge(&done_convert);
      if (mode != ConvertReceiverMode::kNotNullOrUndefined) {
        Label convert_global_proxy;
        __ JumpIfRoot(r5, RootIndex::kUndefinedValue, &convert_global_proxy);
        __ JumpIfNotRoot(r5, RootIndex::kNullValue, &convert_to_object);
        __ bind(&convert_global_proxy);
        {
          // Patch receiver to global proxy.
          __ LoadGlobalProxy(r5);
        }
        __ b(&convert_receiver);
      }
      __ bind(&convert_to_object);
      {
        // Convert receiver using ToObject.
        FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
        __ SmiTag(r2);
        __ Push(r2, r3);
        __ LoadRR(r2, r5);
        __ Push(cp);
        __ Call(BUILTIN_CODE(masm->isolate(), ToObject),
                RelocInfo::CODE_TARGET);
        __ Pop(cp);
        __ LoadRR(r5, r2);
        __ Pop(r2, r3);
        __ SmiUntag(r2);
      }
      __ LoadP(r4, FieldMemOperand(r3, JSFunction::kSharedFunctionInfoOffset));
      __ bind(&convert_receiver);
    }
    __ ShiftLeftP(r6, r2, Operand(kPointerSizeLog2));
    __ StoreP(r5, MemOperand(sp, r6));
  }
  __ bind(&done_convert);

  __ LoadLogicalHalfWordP(
      r4, FieldMemOperand(r4, SharedFunctionInfo::kFormalParameterCountOffset));
  ParameterCount actual(r2);
  ParameterCount expected(r4);
  __ InvokeFunctionCode(r3, no_reg, expected, actual, JUMP_FUNCTION);

  // The function is a "classConstructor", need to raise an exception.
  __ bind(&class_constructor);
  {
    FrameAndConstantPoolScope frame(masm, StackFrame::INTERNAL);
    __ push(r3);
    __ CallRuntime(Runtime::kThrowConstructorNonCallableError);
  }
}
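// Inserts the [[BoundArguments]] of the JSBoundFunction in r3 between the
// receiver and the existing stack arguments, updating r2 accordingly.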
void Generate_PushBoundArguments(MacroAssembler* masm) {
  // Load [[BoundArguments]] into r4 and its length into r6.
  Label no_bound_arguments;
  __ LoadP(r4, FieldMemOperand(r3, JSBoundFunction::kBoundArgumentsOffset));
  __ LoadP(r6, FieldMemOperand(r4, FixedArray::kLengthOffset));
  __ SmiUntag(r6);
  __ LoadAndTestP(r6, r6);
  __ beq(&no_bound_arguments);
  {
    // Reserve stack space for the [[BoundArguments]].
    {
      Label done;
      __ LoadRR(r8, sp);  // Preserve the previous stack pointer.
      __ ShiftLeftP(r9, r6, Operand(kPointerSizeLog2));
      __ SubP(sp, sp, r9);
      // Check the stack for overflow. We are not trying to catch
      // interruptions (i.e. debug break and preemption) here, so check the
      // "real stack limit".
      __ CompareRoot(sp, RootIndex::kRealStackLimit);
      __ bgt(&done);  // Signed comparison.
      // Restore the stack pointer.
      __ LoadRR(sp, r8);
      {
        FrameScope scope(masm, StackFrame::MANUAL);
        __ EnterFrame(StackFrame::INTERNAL);
        __ CallRuntime(Runtime::kThrowStackOverflow);
      }
      __ bind(&done);
    }

    // Relocate arguments down the stack.
    {
      Label skip, loop;
      __ LoadImmP(r7, Operand::Zero());
      __ CmpP(r2, Operand::Zero());
      __ beq(&skip);
      __ LoadRR(r1, r2);
      __ bind(&loop);
      __ LoadP(r0, MemOperand(r8, r7));
      __ StoreP(r0, MemOperand(sp, r7));
      __ AddP(r7, r7, Operand(kPointerSize));
      __ BranchOnCount(r1, &loop);
      __ bind(&skip);
    }

    // Copy [[BoundArguments]] to the stack (below the arguments).
    {
      Label loop;
      __ AddP(r4, r4, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
      __ AddP(r4, r4, r9);
      __ LoadRR(r1, r6);
      __ bind(&loop);
      __ LoadP(r0, MemOperand(r4, -kPointerSize));
      __ lay(r4, MemOperand(r4, -kPointerSize));
      __ StoreP(r0, MemOperand(sp, r7));
      __ AddP(r7, r7, Operand(kPointerSize));
      __ BranchOnCount(r1, &loop);
      __ AddP(r2, r2, r6);
    }
  }
  __ bind(&no_bound_arguments);
}
void Builtins::Generate_CallBoundFunctionImpl(MacroAssembler* masm) {
  // r2 holds the argument count; r3 the JSBoundFunction to call.
  __ AssertBoundFunction(r3);

  // Patch the receiver to [[BoundThis]].
  __ LoadP(ip, FieldMemOperand(r3, JSBoundFunction::kBoundThisOffset));
  __ ShiftLeftP(r1, r2, Operand(kPointerSizeLog2));
  __ StoreP(ip, MemOperand(sp, r1));

  // Push the [[BoundArguments]] onto the stack.
  Generate_PushBoundArguments(masm);

  // Call the [[BoundTargetFunction]] via the Call builtin.
  __ LoadP(r3,
           FieldMemOperand(r3, JSBoundFunction::kBoundTargetFunctionOffset));
  __ Jump(BUILTIN_CODE(masm->isolate(), Call_ReceiverIsAny),
          RelocInfo::CODE_TARGET);
}
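// The generic Call builtin: dispatches on the type of the callable in r3
// (JSFunction, JSBoundFunction, JSProxy or another callable object).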
void Builtins::Generate_Call(MacroAssembler* masm, ConvertReceiverMode mode) {
  Label non_callable, non_function, non_smi;
  __ JumpIfSmi(r3, &non_callable);
  __ bind(&non_smi);
  __ CompareObjectType(r3, r6, r7, JS_FUNCTION_TYPE);
  __ Jump(masm->isolate()->builtins()->CallFunction(mode),
          RelocInfo::CODE_TARGET, eq);
  __ CmpP(r7, Operand(JS_BOUND_FUNCTION_TYPE));
  __ Jump(BUILTIN_CODE(masm->isolate(), CallBoundFunction),
          RelocInfo::CODE_TARGET, eq);

  // Check if target has a [[Call]] internal method.
  __ LoadlB(r6, FieldMemOperand(r6, Map::kBitFieldOffset));
  __ TestBit(r6, Map::IsCallableBit::kShift);
  __ beq(&non_callable);

  // Check if target is a proxy and call the CallProxy builtin.
  __ CmpP(r7, Operand(JS_PROXY_TYPE));
  __ bne(&non_function);
  __ Jump(BUILTIN_CODE(masm->isolate(), CallProxy), RelocInfo::CODE_TARGET);

  // 2. Call to something else, which might have a [[Call]] internal method
  // (if not we raise an exception).
  __ bind(&non_function);
  // Overwrite the original receiver with the (original) target.
  __ ShiftLeftP(r7, r2, Operand(kPointerSizeLog2));
  __ StoreP(r3, MemOperand(sp, r7));
  // Let the "call_as_function_delegate" take care of the rest.
  __ LoadNativeContextSlot(Context::CALL_AS_FUNCTION_DELEGATE_INDEX, r3);
  __ Jump(masm->isolate()->builtins()->CallFunction(
              ConvertReceiverMode::kNotNullOrUndefined),
          RelocInfo::CODE_TARGET);

  // 3. Call to something that is not callable.
  __ bind(&non_callable);
  {
    FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
    __ Push(r3);
    __ CallRuntime(Runtime::kThrowCalledNonCallable);
  }
}
void Builtins::Generate_ConstructFunction(MacroAssembler* masm) {
  // r2 holds the argument count; r3 the JSFunction constructor; r5 the new
  // target (checked to be a constructor).
  __ AssertConstructor(r3, r1);
  __ AssertFunction(r3);

  // Calling convention for function specific ConstructStubs requires r4 to
  // contain either an AllocationSite or undefined.
  __ LoadRoot(r4, RootIndex::kUndefinedValue);

  Label call_generic_stub;

  // Jump to JSBuiltinsConstructStub or JSConstructStubGeneric.
  __ LoadP(r6, FieldMemOperand(r3, JSFunction::kSharedFunctionInfoOffset));
  __ LoadlW(r6, FieldMemOperand(r6, SharedFunctionInfo::kFlagsOffset));
  __ AndP(r6, Operand(SharedFunctionInfo::ConstructAsBuiltinBit::kMask));
  __ beq(&call_generic_stub);

  __ Jump(BUILTIN_CODE(masm->isolate(), JSBuiltinsConstructStub),
          RelocInfo::CODE_TARGET);

  __ bind(&call_generic_stub);
  __ Jump(BUILTIN_CODE(masm->isolate(), JSConstructStubGeneric),
          RelocInfo::CODE_TARGET);
}

void Builtins::Generate_ConstructBoundFunction(MacroAssembler* masm) {
  // r2 holds the argument count; r3 the JSBoundFunction; r5 the new target.
  __ AssertConstructor(r3, r1);
  __ AssertBoundFunction(r3);

  // Push the [[BoundArguments]] onto the stack.
  Generate_PushBoundArguments(masm);

  // Patch new.target to [[BoundTargetFunction]] if new.target equals target.
  Label skip;
  __ CmpP(r3, r5);
  __ bne(&skip);
  __ LoadP(r5,
           FieldMemOperand(r3, JSBoundFunction::kBoundTargetFunctionOffset));
  __ bind(&skip);

  // Construct the [[BoundTargetFunction]] via the Construct builtin.
  __ LoadP(r3,
           FieldMemOperand(r3, JSBoundFunction::kBoundTargetFunctionOffset));
  __ Jump(BUILTIN_CODE(masm->isolate(), Construct), RelocInfo::CODE_TARGET);
}
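// The generic Construct builtin: dispatches on the type of the constructor
// in r3, routing JSFunctions, bound functions and proxies to their dedicated
// construct stubs.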
void Builtins::Generate_Construct(MacroAssembler* masm) {
  // Check if target is a Smi.
  Label non_constructor, non_proxy;
  __ JumpIfSmi(r3, &non_constructor);

  // Check if target has a [[Construct]] internal method.
  __ LoadP(r6, FieldMemOperand(r3, HeapObject::kMapOffset));
  __ LoadlB(r4, FieldMemOperand(r6, Map::kBitFieldOffset));
  __ TestBit(r4, Map::IsConstructorBit::kShift);
  __ beq(&non_constructor);

  // Dispatch based on instance type.
  __ CompareInstanceType(r6, r7, JS_FUNCTION_TYPE);
  __ Jump(BUILTIN_CODE(masm->isolate(), ConstructFunction),
          RelocInfo::CODE_TARGET, eq);

  // Only dispatch to bound functions after checking whether they are
  // constructors.
  __ CmpP(r7, Operand(JS_BOUND_FUNCTION_TYPE));
  __ Jump(BUILTIN_CODE(masm->isolate(), ConstructBoundFunction),
          RelocInfo::CODE_TARGET, eq);

  // Only dispatch to proxies after checking whether they are constructors.
  __ CmpP(r7, Operand(JS_PROXY_TYPE));
  __ bne(&non_proxy);
  __ Jump(BUILTIN_CODE(masm->isolate(), ConstructProxy),
          RelocInfo::CODE_TARGET);

  // Called Construct on an exotic object with a [[Construct]] method.
  __ bind(&non_proxy);
  {
    // Overwrite the original receiver with the (original) target.
    __ ShiftLeftP(r7, r2, Operand(kPointerSizeLog2));
    __ StoreP(r3, MemOperand(sp, r7));
    // Let the "call_as_constructor_delegate" take care of the rest.
    __ LoadNativeContextSlot(Context::CALL_AS_CONSTRUCTOR_DELEGATE_INDEX, r3);
    __ Jump(masm->isolate()->builtins()->CallFunction(),
            RelocInfo::CODE_TARGET);
  }

  // Called Construct on an object that doesn't have a [[Construct]] internal
  // method.
  __ bind(&non_constructor);
  __ Jump(BUILTIN_CODE(masm->isolate(), ConstructedNonConstructable),
          RelocInfo::CODE_TARGET);
}
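// State on entry: r2 -- actual argument count; r3 -- function; r4 -- expected
// argument count; r5 -- new target (passed through to the callee).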
void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
  Label invoke, dont_adapt_arguments, stack_overflow;

  Label enough, too_few;
  __ tmll(r4, Operand(SharedFunctionInfo::kDontAdaptArgumentsSentinel));
  __ b(Condition(1), &dont_adapt_arguments);
  __ CmpLogicalP(r2, r4);
  __ blt(&too_few);

  {  // Enough parameters: actual >= expected.
    __ bind(&enough);
    EnterArgumentsAdaptorFrame(masm);
    Generate_StackOverflowCheck(masm, r4, r7, &stack_overflow);

    // Calculate copy start address into r2 and copy end address into r6.
    __ SmiToPtrArrayOffset(r2, r2);
    __ AddP(r2, fp);
    // Adjust for return address and receiver.
    __ AddP(r2, r2, Operand(2 * kPointerSize));
    __ ShiftLeftP(r6, r4, Operand(kPointerSizeLog2));
    __ SubP(r6, r2, r6);

    // Copy the arguments (including the receiver) to the new stack frame.
    Label copy;
    __ bind(&copy);
    __ LoadP(r0, MemOperand(r2, 0));
    __ push(r0);
    __ CmpP(r2, r6);  // Compare before moving to next argument.
    __ lay(r2, MemOperand(r2, -kPointerSize));
    __ bne(&copy);

    __ b(&invoke);
  }

  {  // Too few parameters: actual < expected.
    __ bind(&too_few);
    EnterArgumentsAdaptorFrame(masm);
    Generate_StackOverflowCheck(masm, r4, r7, &stack_overflow);

    // Calculate copy start address into r2; copy end address is fp.
    __ SmiToPtrArrayOffset(r2, r2);
    __ lay(r2, MemOperand(r2, fp));

    // Copy the arguments (including the receiver) to the new stack frame.
    Label copy;
    __ bind(&copy);
    // Adjust load for return address and receiver.
    __ LoadP(r0, MemOperand(r2, 2 * kPointerSize));
    __ push(r0);
    __ CmpP(r2, fp);  // Compare before moving to next argument.
    __ lay(r2, MemOperand(r2, -kPointerSize));
    __ bne(&copy);

    // Fill the remaining expected arguments with undefined.
    __ LoadRoot(r0, RootIndex::kUndefinedValue);
    __ ShiftLeftP(r6, r4, Operand(kPointerSizeLog2));
    __ SubP(r6, fp, r6);
    // Adjust for frame.
    __ SubP(r6, r6,
            Operand(ArgumentsAdaptorFrameConstants::kFixedFrameSizeFromFp +
                    2 * kPointerSize));

    Label fill;
    __ bind(&fill);
    __ push(r0);
    __ CmpP(sp, r6);
    __ bne(&fill);
  }

  // Call the entry point.
  __ bind(&invoke);
  __ LoadRR(r2, r4);  // r2 now holds the expected number of arguments.
  static_assert(kJavaScriptCallCodeStartRegister == r4, "ABI mismatch");
  __ LoadP(r4, FieldMemOperand(r3, JSFunction::kCodeOffset));
  __ AddP(r4, r4, Operand(Code::kHeaderSize - kHeapObjectTag));
  __ CallJSEntry(r4);

  // Store offset of return address for deoptimizer.
  masm->isolate()->heap()->SetArgumentsAdaptorDeoptPCOffset(masm->pc_offset());

  // Exit frame and return.
  LeaveArgumentsAdaptorFrame(masm);
  __ Ret();

  // Don't adapt arguments.
  __ bind(&dont_adapt_arguments);
  static_assert(kJavaScriptCallCodeStartRegister == r4, "ABI mismatch");
  __ LoadP(r4, FieldMemOperand(r3, JSFunction::kCodeOffset));
  __ AddP(r4, r4, Operand(Code::kHeaderSize - kHeapObjectTag));
  __ JumpToJSEntry(r4);

  __ bind(&stack_overflow);
  {
    FrameScope frame(masm, StackFrame::MANUAL);
    __ CallRuntime(Runtime::kThrowStackOverflow);
    __ bkpt(0);
  }
}
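// Lazily compiles the wasm function whose index arrives in
// kWasmCompileLazyFuncIndexRegister, preserving all wasm parameter registers
// around the runtime call, then jumps to the compiled code.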
void Builtins::Generate_WasmCompileLazy(MacroAssembler* masm) {
  // The function index was put in a register by the jump table trampoline.
  // Convert to Smi for the runtime call.
  __ SmiTag(kWasmCompileLazyFuncIndexRegister,
            kWasmCompileLazyFuncIndexRegister);
  {
    HardAbortScope hard_abort(masm);  // Avoid calls to Abort.
    FrameAndConstantPoolScope scope(masm, StackFrame::WASM_COMPILE_LAZY);

    // Save all parameter registers. They might be overwritten in the runtime
    // call below; there are no callee-saved registers in wasm.
    constexpr RegList gp_regs = Register::ListOf<r2, r3, r4, r5, r6>();
#if V8_TARGET_ARCH_S390X
    constexpr RegList fp_regs = DoubleRegister::ListOf<d0, d2, d4, d6>();
#else
    constexpr RegList fp_regs = DoubleRegister::ListOf<d0, d2>();
#endif
    __ MultiPush(gp_regs);
    __ MultiPushDoubles(fp_regs);

    // Pass instance and function index as explicit arguments to the runtime
    // function.
    __ Push(kWasmInstanceRegister, r7);
    // Load the correct CEntry builtin from the instance object.
    __ LoadP(r4, FieldMemOperand(kWasmInstanceRegister,
                                 WasmInstanceObject::kCEntryStubOffset));
    // Initialize the JavaScript context with 0. CEntry will use it to
    // set the current context on the isolate.
    __ LoadSmiLiteral(cp, Smi::zero());
    __ CallRuntimeWithCEntry(Runtime::kWasmCompileLazy, r4);
    // The entrypoint address is the return value.
    __ LoadRR(ip, r2);

    // Restore registers.
    __ MultiPopDoubles(fp_regs);
    __ MultiPop(gp_regs);
  }
  __ Jump(ip);
}
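// Called from compiled code to run a C++ runtime function.
// r2 -- number of arguments including receiver; r3 -- pointer to the builtin
// function; cp -- current context (C callee-saved); sp points at the last
// argument; with kArgvInRegister, r4 points to the first argument.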
void Builtins::Generate_CEntry(MacroAssembler* masm, int result_size,
                               SaveFPRegsMode save_doubles, ArgvMode argv_mode,
                               bool builtin_exit_frame) {
  __ LoadRR(r7, r3);

  if (argv_mode == kArgvInRegister) {
    // Move argv into the correct register.
    __ LoadRR(r3, r4);
  } else {
    // Compute the argv pointer.
    __ ShiftLeftP(r3, r2, Operand(kPointerSizeLog2));
    __ lay(r3, MemOperand(r3, sp, -kPointerSize));
  }

  // Enter the exit frame that transitions from JavaScript to C++.
  FrameScope scope(masm, StackFrame::MANUAL);

  // Need at least one extra slot for return address location.
  int arg_stack_space = 1;

  // Pass buffer for return value on stack if necessary.
  bool needs_return_buffer =
      result_size == 2 && !ABI_RETURNS_OBJECTPAIR_IN_REGS;
  if (needs_return_buffer) {
    arg_stack_space += result_size;
  }

#if V8_TARGET_ARCH_S390X
  // 64-bit linux passes an Argument object instead of an int array.
  arg_stack_space += 2;
#endif

  __ EnterExitFrame(
      save_doubles, arg_stack_space,
      builtin_exit_frame ? StackFrame::BUILTIN_EXIT : StackFrame::EXIT);

  // Store a copy of argc in a callee-saved register for later.
  __ LoadRR(r6, r2);
  // r2: number of arguments including receiver
  // r3: pointer to the first argument
  // r7: pointer to builtin function (C callee-saved)

  // Result returned in registers or stack, depending on result size and ABI.
  Register isolate_reg = r4;
  if (needs_return_buffer) {
    // Use the stack frame storage reserved above to pass the return buffer as
    // an implicit first argument; shift the original parameters by one
    // register each.
    __ LoadRR(r4, r3);
    __ LoadRR(r3, r2);
    __ la(r2, MemOperand(sp, (kStackFrameExtraParamSlot + 1) * kPointerSize));
    isolate_reg = r5;
  }
  // Call C built-in.
  __ Move(isolate_reg, ExternalReference::isolate_address(masm->isolate()));

  Register target = r7;

  // To let the GC traverse the return address of the exit frame, we need to
  // know where the return address is. This stub is unmovable, so we store the
  // address on the stack where it can always be found again.
  {
    Label return_label;
    __ larl(r14, &return_label);  // Generate the return addr of call later.
    __ StoreP(r14, MemOperand(sp, kStackFrameRASlot * kPointerSize));

    __ b(target);
    __ bind(&return_label);
  }

  // If the return value is on the stack, pop it to registers.
  if (needs_return_buffer) {
    // Load the result from the return buffer.
    __ LoadP(r3, MemOperand(r2, kPointerSize));
    __ LoadP(r2, MemOperand(r2));
  }

  // Check result for exception sentinel.
  Label exception_returned;
  __ CompareRoot(r2, RootIndex::kException);
  __ beq(&exception_returned, Label::kNear);

  // Check that there is no pending exception, otherwise we should have
  // returned the exception sentinel.
  if (FLAG_debug_code) {
    Label okay;
    ExternalReference pending_exception_address = ExternalReference::Create(
        IsolateAddressId::kPendingExceptionAddress, masm->isolate());
    __ Move(r1, pending_exception_address);
    __ LoadP(r1, MemOperand(r1));
    __ CompareRoot(r1, RootIndex::kTheHoleValue);
    // Cannot use check here as it attempts to generate a call into runtime.
    __ beq(&okay, Label::kNear);
    __ stop("Unexpected pending exception");
    __ bind(&okay);
  }

  // Exit the C frame and return: r2:r3 hold the result.
  Register argc = argv_mode == kArgvInRegister
                      // We don't want to pop arguments so set argc to no_reg.
                      ? no_reg
                      // r6 still holds argc (C callee-saved).
                      : r6;
  __ LeaveExitFrame(save_doubles, argc);
  __ b(r14);

  // Handling of exception.
  __ bind(&exception_returned);

  ExternalReference pending_handler_context_address = ExternalReference::Create(
      IsolateAddressId::kPendingHandlerContextAddress, masm->isolate());
  ExternalReference pending_handler_entrypoint_address =
      ExternalReference::Create(
          IsolateAddressId::kPendingHandlerEntrypointAddress, masm->isolate());
  ExternalReference pending_handler_fp_address = ExternalReference::Create(
      IsolateAddressId::kPendingHandlerFPAddress, masm->isolate());
  ExternalReference pending_handler_sp_address = ExternalReference::Create(
      IsolateAddressId::kPendingHandlerSPAddress, masm->isolate());

  // Ask the runtime for help to determine the handler. This will set r2 to
  // contain the current pending exception, don't clobber it.
  ExternalReference find_handler =
      ExternalReference::Create(Runtime::kUnwindAndFindExceptionHandler);
  {
    FrameScope scope(masm, StackFrame::MANUAL);
    __ PrepareCallCFunction(3, 0, r2);
    __ LoadImmP(r2, Operand::Zero());
    __ LoadImmP(r3, Operand::Zero());
    __ Move(r4, ExternalReference::isolate_address(masm->isolate()));
    __ CallCFunction(find_handler, 3);
  }

  // Retrieve the handler context, SP and FP.
  __ Move(cp, pending_handler_context_address);
  __ LoadP(cp, MemOperand(cp));
  __ Move(sp, pending_handler_sp_address);
  __ LoadP(sp, MemOperand(sp));
  __ Move(fp, pending_handler_fp_address);
  __ LoadP(fp, MemOperand(fp));

  // If the handler is a JS frame, restore the context to the frame. Note that
  // the context will be set to (cp == 0) for non-JS frames.
  Label skip;
  __ CmpP(cp, Operand::Zero());
  __ beq(&skip, Label::kNear);
  __ StoreP(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
  __ bind(&skip);

  // Reset the masking register. This is done independent of the underlying
  // feature flag to make the snapshot work with both configurations; the
  // underlying register is caller-saved and can be arbitrarily clobbered.
  __ ResetSpeculationPoisonRegister();

  // Compute the handler entry address and jump to it.
  __ Move(r3, pending_handler_entrypoint_address);
  __ LoadP(r3, MemOperand(r3));
  __ Jump(r3);
}
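// Truncates the double in the stack slot to a 32-bit integer following the
// ECMAScript ToInt32 semantics (modulo 2^32), handling values outside the
// int32 range in software.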
void Builtins::Generate_DoubleToI(MacroAssembler* masm) {
  Label out_of_range, only_low, negate, done, fastpath_done;
  Register result_reg = r2;

  HardAbortScope hard_abort(masm);  // Avoid calls to Abort.

  // Immediate values for this stub fit in instructions, so it's safe to use
  // ip as a scratch register.
  Register scratch = GetRegisterThatIsNotOneOf(result_reg);
  Register scratch_low = GetRegisterThatIsNotOneOf(result_reg, scratch);
  Register scratch_high =
      GetRegisterThatIsNotOneOf(result_reg, scratch, scratch_low);
  DoubleRegister double_scratch = kScratchDoubleReg;

  __ Push(result_reg, scratch);
  // Account for saved regs.
  int argument_offset = 2 * kPointerSize;

  // Load double input.
  __ LoadDouble(double_scratch, MemOperand(sp, argument_offset));

  // Do fast-path convert from double to int.
  __ ConvertDoubleToInt64(result_reg, double_scratch);

  // Test for overflow.
  __ TestIfInt32(result_reg);
  __ beq(&fastpath_done, Label::kNear);

  __ Push(scratch_high, scratch_low);
  // Account for saved regs.
  argument_offset += 2 * kPointerSize;

  __ LoadlW(scratch_high,
            MemOperand(sp, argument_offset + Register::kExponentOffset));
  __ LoadlW(scratch_low,
            MemOperand(sp, argument_offset + Register::kMantissaOffset));

  __ ExtractBitMask(scratch, scratch_high, HeapNumber::kExponentMask);
  // Load scratch with exponent - 1. This is faster than loading
  // with exponent because Bias + 1 = 1024 which is a *S390* immediate value.
  STATIC_ASSERT(HeapNumber::kExponentBias + 1 == 1024);
  __ SubP(scratch, Operand(HeapNumber::kExponentBias + 1));
  // If exponent is greater than or equal to 84, the 32 less significant
  // bits are 0s (2^84 = 1, 52 significant bits, 32 uncoded bits),
  // the result is 0. Compare exponent with 84 (compare exponent - 1 with 83).
  __ CmpP(scratch, Operand(83));
  __ bge(&out_of_range, Label::kNear);

  // If we reach this code, 31 <= exponent <= 83. So we don't have to handle
  // cases where 0 <= exponent <= 20, for which we would need to shift right
  // the high part of the mantissa.
  // Load scratch with 52 - exponent (load with 51 - (exponent - 1)).
  __ Load(r0, Operand(51));
  __ SubP(scratch, r0, scratch);
  __ CmpP(scratch, Operand::Zero());
  __ ble(&only_low, Label::kNear);
  // 21 <= exponent <= 51, shift scratch_low and scratch_high
  // to generate the result.
  __ ShiftRight(scratch_low, scratch_low, scratch);
  // Scratch contains: 52 - exponent; we need: exponent - 20.
  // So we use: 32 - scratch = 32 - 52 + exponent = exponent - 20.
  __ Load(r0, Operand(32));
  __ SubP(scratch, r0, scratch);
  __ ExtractBitMask(result_reg, scratch_high, HeapNumber::kMantissaMask);
  // Set the implicit 1 before the mantissa part in scratch_high.
  STATIC_ASSERT(HeapNumber::kMantissaBitsInTopWord >= 16);
  __ Load(r0, Operand(1 << ((HeapNumber::kMantissaBitsInTopWord)-16)));
  __ ShiftLeftP(r0, r0, Operand(16));
  __ OrP(result_reg, result_reg, r0);
  __ ShiftLeft(r0, result_reg, scratch);
  __ OrP(result_reg, scratch_low, r0);
  __ b(&negate, Label::kNear);

  __ bind(&out_of_range);
  __ mov(result_reg, Operand::Zero());
  __ b(&done, Label::kNear);

  __ bind(&only_low);
  // 52 <= exponent <= 83, shift only scratch_low.
  // On entry, scratch contains: 52 - exponent.
  __ LoadComplementRR(scratch, scratch);
  __ ShiftLeft(result_reg, scratch_low, scratch);

  __ bind(&negate);
  // If input was positive, scratch_high ASR 31 equals 0 and
  // scratch_high LSR 31 equals zero, so the result is unchanged.
  // If the input was negative, (result ^ 0xFFFFFFFF) + 1 = 0 - result.
  __ ShiftRightArith(r0, scratch_high, Operand(31));
#if V8_TARGET_ARCH_S390X
  __ lgfr(r0, r0);
  __ ShiftRightP(r0, r0, Operand(32));
#endif
  __ XorP(result_reg, r0);
  __ ShiftRight(r0, scratch_high, Operand(31));
  __ AddP(result_reg, r0);

  __ bind(&done);
  __ Pop(scratch_high, scratch_low);
  // Account for saved regs.
  argument_offset -= 2 * kPointerSize;

  __ bind(&fastpath_done);
  __ StoreP(result_reg, MemOperand(sp, argument_offset));
  __ Pop(result_reg, scratch);

  __ Ret();
}
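// Computes base^exponent for Math.pow: integer exponents use an inline
// square-and-multiply loop, everything else calls the C power function.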
void Builtins::Generate_MathPowInternal(MacroAssembler* masm) {
  const Register exponent = r4;
  const DoubleRegister double_base = d1;
  const DoubleRegister double_exponent = d2;
  const DoubleRegister double_result = d3;
  const DoubleRegister double_scratch = d0;
  const Register scratch = r1;
  const Register scratch2 = r9;

  Label call_runtime, done, int_exponent;

  // Detect integer exponents stored as double.
  __ TryDoubleToInt32Exact(scratch, double_exponent, scratch2, double_scratch);
  __ beq(&int_exponent, Label::kNear);

  // Non-integer exponent: call the C power function.
  {
    AllowExternalCallThatCantCauseGC scope(masm);
    __ PrepareCallCFunction(0, 2, scratch);
    __ MovToFloatParameters(double_base, double_exponent);
    __ CallCFunction(ExternalReference::power_double_double_function(), 0, 2);
  }
  __ MovFromFloatResult(double_result);
  __ b(&done);

  // Calculate power with integer exponent.
  __ bind(&int_exponent);

  // Exponent has previously been stored into scratch as untagged integer.
  __ LoadRR(exponent, scratch);

  __ ldr(double_scratch, double_base);  // Back up base.
  __ LoadImmP(scratch2, Operand(1));
  __ ConvertIntToDouble(double_result, scratch2);

  // Get absolute value of exponent.
  Label positive_exponent;
  __ CmpP(scratch, Operand::Zero());
  __ bge(&positive_exponent, Label::kNear);
  __ LoadComplementRR(scratch, scratch);
  __ bind(&positive_exponent);

  Label while_true, no_carry, loop_end;
  __ bind(&while_true);
  __ mov(scratch2, Operand(1));
  __ AndP(scratch2, scratch);
  __ beq(&no_carry, Label::kNear);
  __ mdbr(double_result, double_scratch);
  __ bind(&no_carry);
  __ ShiftRightP(scratch, scratch, Operand(1));
  __ LoadAndTestP(scratch, scratch);
  __ beq(&loop_end, Label::kNear);
  __ mdbr(double_scratch, double_scratch);
  __ b(&while_true);

  __ bind(&loop_end);

  __ CmpP(exponent, Operand::Zero());
  __ bge(&done);

  // Negative exponent: compute 1/double_result.
  __ ldr(double_scratch, double_result);
  __ LoadImmP(scratch2, Operand(1));
  __ ConvertIntToDouble(double_result, scratch2);
  __ ddbr(double_result, double_scratch);

  // Test whether result is zero. Bail out to check for subnormal result.
  // Due to subnormals, x^-y == (1/x)^y does not hold in all cases.
  __ lzdr(kDoubleRegZero);
  __ cdbr(double_result, kDoubleRegZero);
  __ bne(&done, Label::kNear);
  // double_exponent may not contain the exponent value if the input was a
  // smi. We set it with the exponent value before bailing out.
  __ ConvertIntToDouble(double_exponent, exponent);

  // Bail out to the runtime-supplied C function.
  {
    AllowExternalCallThatCantCauseGC scope(masm);
    __ PrepareCallCFunction(0, 2, scratch);
    __ MovToFloatParameters(double_base, double_exponent);
    __ CallCFunction(ExternalReference::power_double_double_function(), 0, 2);
  }
  __ MovFromFloatResult(double_result);

  __ bind(&done);
  __ Ret();
}
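// Dispatches an InternalArray construction of the given elements kind to the
// matching no-argument, single-argument or N-argument constructor stub.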
void GenerateInternalArrayConstructorCase(MacroAssembler* masm,
                                          ElementsKind kind) {
  // Load undefined into the allocation site parameter as required by
  // ArrayNArgumentsConstructor.
  __ LoadRoot(kJavaScriptCallExtraArg1Register, RootIndex::kUndefinedValue);

  __ CmpLogicalP(r2, Operand(1));

  __ Jump(CodeFactory::InternalArrayNoArgumentConstructor(masm->isolate(), kind)
              .code(),
          RelocInfo::CODE_TARGET, lt);

  __ Jump(BUILTIN_CODE(masm->isolate(), ArrayNArgumentsConstructor),
          RelocInfo::CODE_TARGET, gt);

  if (IsFastPackedElementsKind(kind)) {
    // We might need to create a holey array; look at the first argument.
    __ LoadP(r5, MemOperand(sp, 0));
    __ CmpP(r5, Operand::Zero());

    __ Jump(CodeFactory::InternalArraySingleArgumentConstructor(
                masm->isolate(), GetHoleyElementsKind(kind))
                .code(),
            RelocInfo::CODE_TARGET, ne);
  }

  __ Jump(
      CodeFactory::InternalArraySingleArgumentConstructor(masm->isolate(), kind)
          .code(),
      RelocInfo::CODE_TARGET);
}
void Builtins::Generate_InternalArrayConstructorImpl(MacroAssembler* masm) {
  // State on entry: r2 -- argc; r3 -- constructor; sp[0] -- return address;
  // sp[kPointerSize] -- last argument.

  if (FLAG_debug_code) {
    // The array construct code is only set for the global and natives
    // builtin Array functions which always have maps.

    // Initial map for the builtin Array function should be a map.
    __ LoadP(r5, FieldMemOperand(r3, JSFunction::kPrototypeOrInitialMapOffset));
    // Will both indicate a nullptr and a Smi.
    __ TestIfSmi(r5);
    __ Assert(ne, AbortReason::kUnexpectedInitialMapForArrayFunction, cr0);
    __ CompareObjectType(r5, r5, r6, MAP_TYPE);
    __ Assert(eq, AbortReason::kUnexpectedInitialMapForArrayFunction);
  }

  // Figure out the right elements kind.
  __ LoadP(r5, FieldMemOperand(r3, JSFunction::kPrototypeOrInitialMapOffset));
  // Load the map's "bit field 2" and retrieve the elements kind from it.
  __ LoadlB(r5, FieldMemOperand(r5, Map::kBitField2Offset));
  __ DecodeField<Map::ElementsKindBits>(r5);

  if (FLAG_debug_code) {
    Label done;
    __ CmpP(r5, Operand(PACKED_ELEMENTS));
    __ beq(&done);
    __ CmpP(r5, Operand(HOLEY_ELEMENTS));
    __ Assert(
        eq,
        AbortReason::kInvalidElementsKindForInternalArrayOrInternalPackedArray);
    __ bind(&done);
  }

  Label fast_elements_case;
  __ CmpP(r5, Operand(PACKED_ELEMENTS));
  __ beq(&fast_elements_case);
  GenerateInternalArrayConstructorCase(masm, HOLEY_ELEMENTS);

  __ bind(&fast_elements_case);
  GenerateInternalArrayConstructorCase(masm, PACKED_ELEMENTS);
}

#undef __

}  // namespace internal
}  // namespace v8

#endif  // V8_TARGET_ARCH_S390