#if V8_TARGET_ARCH_MIPS

#include "src/code-factory.h"
#include "src/code-stubs.h"
#include "src/counters.h"
#include "src/debug/debug.h"
#include "src/deoptimizer.h"
#include "src/frame-constants.h"
#include "src/frames.h"
#include "src/mips/constants-mips.h"
#include "src/objects-inl.h"
#include "src/objects/js-generator.h"
#include "src/objects/smi.h"
#include "src/register-configuration.h"
#include "src/runtime/runtime.h"
#include "src/wasm/wasm-objects.h"

namespace v8 {
namespace internal {

#define __ ACCESS_MASM(masm)

void Builtins::Generate_Adaptor(MacroAssembler* masm, Address address,
                                ExitFrameType exit_frame_type) {
  __ li(kJavaScriptCallExtraArg1Register, ExternalReference::Create(address));
  if (exit_frame_type == BUILTIN_EXIT) {
    __ Jump(BUILTIN_CODE(masm->isolate(), AdaptorWithBuiltinExitFrame),
            RelocInfo::CODE_TARGET);
  } else {
    DCHECK(exit_frame_type == EXIT);
    __ Jump(BUILTIN_CODE(masm->isolate(), AdaptorWithExitFrame),
            RelocInfo::CODE_TARGET);
  }
}
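// Entry point for the InternalArray constructor: verifies the initial map of
// the InternalArray function (in a1) in debug builds, then dispatches to
// InternalArrayConstructorImpl. a0 holds the argument count.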
void Builtins::Generate_InternalArrayConstructor(MacroAssembler* masm) {
  Label generic_array_code, one_or_more_arguments, two_or_more_arguments;

  if (FLAG_debug_code) {
    // The initial map for the builtin InternalArray function should be a map.
    __ lw(a2, FieldMemOperand(a1, JSFunction::kPrototypeOrInitialMapOffset));
    __ SmiTst(a2, t0);
    __ Assert(ne, AbortReason::kUnexpectedInitialMapForInternalArrayFunction,
              t0, Operand(zero_reg));
    __ GetObjectType(a2, a3, t0);
    __ Assert(eq, AbortReason::kUnexpectedInitialMapForInternalArrayFunction,
              t0, Operand(MAP_TYPE));
  }

  // Run the native code for the InternalArray function called as a normal
  // function.
  __ Jump(BUILTIN_CODE(masm->isolate(), InternalArrayConstructorImpl),
          RelocInfo::CODE_TARGET);
}
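// Calls the given runtime function, which is expected to return a Code
// object, and tail-calls the returned code. a0 (argument count), a1 (target
// function) and a3 (new target) are preserved across the runtime call.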
static void GenerateTailCallToReturnedCode(MacroAssembler* masm,
                                           Runtime::FunctionId function_id) {
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    // Push a copy of the target function and the new target; push the
    // function again as a parameter to the runtime call.
    __ SmiTag(a0);
    __ Push(a0, a1, a3, a1);

    __ CallRuntime(function_id, 1);

    // Restore target function and new target.
    __ Pop(a0, a1, a3);
    __ SmiUntag(a0);
  }

  static_assert(kJavaScriptCallCodeStartRegister == a2, "ABI mismatch");
  __ Addu(a2, v0, Code::kHeaderSize - kHeapObjectTag);
  __ Jump(a2);
}

namespace {
void Generate_JSBuiltinsConstructStubHelper(MacroAssembler* masm) {
  // Enter a construct frame and preserve the incoming parameters on the
  // stack.
  {
    FrameScope scope(masm, StackFrame::CONSTRUCT);
    __ SmiTag(a0);
    __ Push(cp, a0);
    __ SmiUntag(a0);

    // The receiver for the builtin/api call.
    __ PushRoot(RootIndex::kTheHoleValue);

    // Set up pointer to last argument.
    __ Addu(t2, fp, Operand(StandardFrameConstants::kCallerSPOffset));

    // Copy arguments and receiver to the expression stack.
    Label loop, entry;
    __ mov(t3, a0);
    __ jmp(&entry);
    __ bind(&loop);
    __ Lsa(t0, t2, t3, kPointerSizeLog2);
    __ lw(t1, MemOperand(t0));
    __ push(t1);
    __ bind(&entry);
    __ Addu(t3, t3, Operand(-1));
    __ Branch(&loop, greater_equal, t3, Operand(zero_reg));

    // Call the function, then restore the context and the smi-tagged
    // arguments count from the frame.
    ParameterCount actual(a0);
    __ InvokeFunction(a1, a3, actual, CALL_FUNCTION);
    __ lw(cp, MemOperand(fp, ConstructFrameConstants::kContextOffset));
    __ lw(a1, MemOperand(fp, ConstructFrameConstants::kLengthOffset));
  }

  // Remove caller arguments from the stack and return.
  __ Lsa(sp, sp, a1, kPointerSizeLog2 - 1);
  __ Addu(sp, sp, kPointerSize);
  __ Ret();
}
static void Generate_StackOverflowCheck(MacroAssembler* masm, Register num_args,
                                        Register scratch1, Register scratch2,
                                        Label* stack_overflow) {
  // Check the stack for overflow. We are not trying to catch interruptions
  // (e.g. debug break and preemption) here, so the "real stack limit" is
  // checked.
  __ LoadRoot(scratch1, RootIndex::kRealStackLimit);
  // Make scratch1 the space we have left. The stack might already be
  // overflowed here, which will cause scratch1 to become negative.
  __ subu(scratch1, sp, scratch1);
  // Check if the arguments will overflow the stack (signed comparison).
  __ sll(scratch2, num_args, kPointerSizeLog2);
  __ Branch(stack_overflow, le, scratch1, Operand(scratch2));
}

}  // namespace
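// The construct stub used for ES5 constructor functions and ES6 class
// constructors. On entry a0 holds the (untagged) argument count, a1 the
// constructor to call, a3 the new target and cp the context; the constructor
// arguments are on the stack.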
void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) {
  // Enter a construct frame.
  {
    FrameScope scope(masm, StackFrame::CONSTRUCT);
    Label post_instantiation_deopt_entry, not_create_implicit_receiver;

    // Preserve the incoming parameters on the stack.
    __ SmiTag(a0);
    __ Push(cp, a0, a1);
    __ PushRoot(RootIndex::kTheHoleValue);
    __ Push(a3);

    // Derived-class constructors never need an implicit receiver.
    __ lw(t2, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset));
    __ lw(t2, FieldMemOperand(t2, SharedFunctionInfo::kFlagsOffset));
    __ And(t2, t2, Operand(SharedFunctionInfo::IsDerivedConstructorBit::kMask));
    __ Branch(&not_create_implicit_receiver, ne, t2, Operand(zero_reg));

    // If not derived class constructor: Allocate the new receiver object.
    __ IncrementCounter(masm->isolate()->counters()->constructed_objects(), 1,
                        t2, t3);
    __ Call(BUILTIN_CODE(masm->isolate(), FastNewObject),
            RelocInfo::CODE_TARGET);
    __ Branch(&post_instantiation_deopt_entry);

    // Else: use TheHoleValue as receiver for constructor call.
    __ bind(&not_create_implicit_receiver);
    __ LoadRoot(v0, RootIndex::kTheHoleValue);

    // Store offset of the deopt entry point for the deoptimizer.
    masm->isolate()->heap()->SetConstructStubCreateDeoptPCOffset(
        masm->pc_offset());
    __ bind(&post_instantiation_deopt_entry);

    // Restore new target and push the allocated receiver twice: once as the
    // potential result and once as the implicit receiver argument.
    __ Pop(a3);
    __ Push(v0, v0);

    // Restore constructor function and argument count.
    __ lw(a1, MemOperand(fp, ConstructFrameConstants::kConstructorOffset));
    __ lw(a0, MemOperand(fp, ConstructFrameConstants::kLengthOffset));
    __ SmiUntag(a0);

    // Set up pointer to last argument.
    __ Addu(t2, fp, Operand(StandardFrameConstants::kCallerSPOffset));

    Label enough_stack_space, stack_overflow;
    Generate_StackOverflowCheck(masm, a0, t0, t1, &stack_overflow);
    __ Branch(&enough_stack_space);

    __ bind(&stack_overflow);
    // Restore the context from the frame.
    __ lw(cp, MemOperand(fp, ConstructFrameConstants::kContextOffset));
    __ CallRuntime(Runtime::kThrowStackOverflow);

    __ bind(&enough_stack_space);

    // Copy arguments and receiver to the expression stack.
    Label loop, entry;
    __ mov(t3, a0);
    __ jmp(&entry);
    __ bind(&loop);
    __ Lsa(t0, t2, t3, kPointerSizeLog2);
    __ lw(t1, MemOperand(t0));
    __ push(t1);
    __ bind(&entry);
    __ Addu(t3, t3, Operand(-1));
    __ Branch(&loop, greater_equal, t3, Operand(zero_reg));

    // Call the function.
    ParameterCount actual(a0);
    __ InvokeFunction(a1, a3, actual, CALL_FUNCTION);

    // Store offset of return address for deoptimizer.
    masm->isolate()->heap()->SetConstructStubInvokeDeoptPCOffset(
        masm->pc_offset());

    // Restore the context from the frame.
    __ lw(cp, MemOperand(fp, ConstructFrameConstants::kContextOffset));

    // If the result is an object (in the ECMA sense), we should get rid of
    // the receiver and use the result; see ECMA-262 section 13.2.2-7.
    Label use_receiver, do_throw, leave_frame;

    // If the result is undefined, use the implicit receiver.
    __ JumpIfRoot(v0, RootIndex::kUndefinedValue, &use_receiver);

    // If the result is a smi, it is *not* an object in the ECMA sense.
    __ JumpIfSmi(v0, &use_receiver);

    // If the type of the result (stored in its map) is less than
    // FIRST_JS_RECEIVER_TYPE, it is not an object in the ECMA sense.
    __ GetObjectType(v0, t2, t2);
    STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
    __ Branch(&leave_frame, greater_equal, t2, Operand(FIRST_JS_RECEIVER_TYPE));
    __ Branch(&use_receiver);

    __ bind(&do_throw);
    __ CallRuntime(Runtime::kThrowConstructorReturnedNonObject);

    // Throw away the result of the constructor invocation and use the
    // on-stack receiver as the result.
    __ bind(&use_receiver);
    __ lw(v0, MemOperand(sp, 0 * kPointerSize));
    __ JumpIfRoot(v0, RootIndex::kTheHoleValue, &do_throw);

    __ bind(&leave_frame);
    // Restore smi-tagged arguments count from the frame.
    __ lw(a1, MemOperand(fp, ConstructFrameConstants::kLengthOffset));
    // Leave construct frame.
  }

  // Remove caller arguments from the stack and return.
  __ Lsa(sp, sp, a1, kPointerSizeLog2 - kSmiTagSize);
  __ Addu(sp, sp, kPointerSize);
  __ Ret();
}
void Builtins::Generate_JSBuiltinsConstructStub(MacroAssembler* masm) {
  Generate_JSBuiltinsConstructStubHelper(masm);
}

void Builtins::Generate_ConstructedNonConstructable(MacroAssembler* masm) {
  FrameScope scope(masm, StackFrame::INTERNAL);
  __ Push(a1);
  __ CallRuntime(Runtime::kThrowConstructedNonConstructable);
}
static void Generate_CheckStackOverflow(MacroAssembler* masm, Register argc) {
  // Check the stack for overflow. We are not trying to catch interruptions
  // (e.g. debug break and preemption) here, so the "real stack limit" is
  // checked.
  Label okay;
  __ LoadRoot(a2, RootIndex::kRealStackLimit);
  // Make a2 the space we have left; check if the arguments will fit.
  __ subu(a2, sp, a2);
  __ sll(t3, argc, kPointerSizeLog2);
  // Signed comparison.
  __ Branch(&okay, gt, a2, Operand(t3));

  // Out of stack space.
  __ CallRuntime(Runtime::kThrowStackOverflow);

  __ bind(&okay);
}
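// Shared helper for Generate_JSEntryTrampoline and
// Generate_JSConstructEntryTrampoline. On entry a0 holds the new target,
// a1 the function, a2 the receiver pointer, a3 argc and s0 argv.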
static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm,
                                             bool is_construct) {
  // Enter an internal frame.
  {
    FrameScope scope(masm, StackFrame::INTERNAL);

    // Set up the context from the isolate (we need to use the caller context).
    ExternalReference context_address = ExternalReference::Create(
        IsolateAddressId::kContextAddress, masm->isolate());
    __ li(cp, context_address);
    __ lw(cp, MemOperand(cp));

    // Push the function and the receiver onto the stack.
    __ Push(a1, a2);

    // Check if we have enough stack space to push all arguments (clobbers a2).
    Generate_CheckStackOverflow(masm, a3);

    // Remember new.target.
    __ mov(t1, a0);

    // Copy arguments to the stack in a loop.
    // a3: argc, s0: argv (points to the first argument).
    Label loop, entry;
    __ Lsa(t2, s0, a3, kPointerSizeLog2);
    __ b(&entry);
    __ nop();  // Branch delay slot nop.
    // t2 points past the last argument.
    __ bind(&loop);
    __ lw(t0, MemOperand(s0));  // Read next parameter.
    __ addiu(s0, s0, kPointerSize);
    __ lw(t0, MemOperand(t0));  // Dereference handle.
    __ push(t0);                // Push parameter.
    __ bind(&entry);
    __ Branch(&loop, ne, s0, Operand(t2));

    // Set up new.target and argc.
    __ mov(a0, a3);
    __ mov(a3, t1);

    // Initialize all JavaScript callee-saved registers, since they will be
    // seen by the garbage collector as part of handlers.
    __ LoadRoot(t0, RootIndex::kUndefinedValue);
    __ mov(s0, t0);
    __ mov(s1, t0);
    __ mov(s2, t0);
    __ mov(s3, t0);
    __ mov(s4, t0);
    __ mov(s5, t0);
    // s6 holds the root address; s7 is cp. Do not clobber either.

    // Invoke the code.
    Handle<Code> builtin = is_construct
                               ? BUILTIN_CODE(masm->isolate(), Construct)
                               : masm->isolate()->builtins()->Call();
    __ Call(builtin, RelocInfo::CODE_TARGET);

    // Leave internal frame.
  }

  __ Jump(ra);
}
void Builtins::Generate_JSEntryTrampoline(MacroAssembler* masm) {
  Generate_JSEntryTrampolineHelper(masm, false);
}

void Builtins::Generate_JSConstructEntryTrampoline(MacroAssembler* masm) {
  Generate_JSEntryTrampolineHelper(masm, true);
}
static void GetSharedFunctionInfoBytecode(MacroAssembler* masm,
                                          Register sfi_data,
                                          Register scratch1) {
  Label done;

  __ GetObjectType(sfi_data, scratch1, scratch1);
  __ Branch(&done, ne, scratch1, Operand(INTERPRETER_DATA_TYPE));
  __ lw(sfi_data,
        FieldMemOperand(sfi_data, InterpreterData::kBytecodeArrayOffset));

  __ bind(&done);
}
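// Resumes a suspended generator.
// ----------- S t a t e -------------
//  -- v0 : the value to pass to the generator
//  -- a1 : the JSGeneratorObject to resume
//  -- ra : return address
// -----------------------------------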
void Builtins::Generate_ResumeGeneratorTrampoline(MacroAssembler* masm) {
  __ AssertGeneratorObject(a1);

  // Store input value into generator object.
  __ sw(v0, FieldMemOperand(a1, JSGeneratorObject::kInputOrDebugPosOffset));
  __ RecordWriteField(a1, JSGeneratorObject::kInputOrDebugPosOffset, v0, a3,
                      kRAHasNotBeenSaved, kDontSaveFPRegs);

  // Load suspended function and context.
  __ lw(t0, FieldMemOperand(a1, JSGeneratorObject::kFunctionOffset));
  __ lw(cp, FieldMemOperand(t0, JSFunction::kContextOffset));

  // Flood function if we are stepping.
  Label prepare_step_in_if_stepping, prepare_step_in_suspended_generator;
  Label stepping_prepared;
  ExternalReference debug_hook =
      ExternalReference::debug_hook_on_function_call_address(masm->isolate());
  __ li(t1, debug_hook);
  __ lb(t1, MemOperand(t1));
  __ Branch(&prepare_step_in_if_stepping, ne, t1, Operand(zero_reg));

  // Flood function if we need to continue stepping in the suspended generator.
  ExternalReference debug_suspended_generator =
      ExternalReference::debug_suspended_generator_address(masm->isolate());
  __ li(t1, debug_suspended_generator);
  __ lw(t1, MemOperand(t1));
  __ Branch(&prepare_step_in_suspended_generator, eq, a1, Operand(t1));
  __ bind(&stepping_prepared);

  // Check the stack for overflow. We are not trying to catch interruptions
  // (i.e. debug break and preemption) here, so check the "real stack limit".
  Label stack_overflow;
  __ LoadRoot(kScratchReg, RootIndex::kRealStackLimit);
  __ Branch(&stack_overflow, lo, sp, Operand(kScratchReg));

  // Push receiver.
  __ lw(t1, FieldMemOperand(a1, JSGeneratorObject::kReceiverOffset));
  __ Push(t1);

  // Copy the function arguments from the generator object's register file.
  __ lw(a3, FieldMemOperand(t0, JSFunction::kSharedFunctionInfoOffset));
  __ lhu(a3,
         FieldMemOperand(a3, SharedFunctionInfo::kFormalParameterCountOffset));
  __ lw(t1,
        FieldMemOperand(a1, JSGeneratorObject::kParametersAndRegistersOffset));
  {
    Label done_loop, loop;
    __ Move(t2, zero_reg);
    __ bind(&loop);
    __ Subu(a3, a3, Operand(1));
    __ Branch(&done_loop, lt, a3, Operand(zero_reg));
    __ Lsa(kScratchReg, t1, t2, kPointerSizeLog2);
    __ lw(kScratchReg, FieldMemOperand(kScratchReg, FixedArray::kHeaderSize));
    __ Push(kScratchReg);
    __ Addu(t2, t2, Operand(1));
    __ Branch(&loop);
    __ bind(&done_loop);
  }

  // Underlying function needs to have bytecode available.
  if (FLAG_debug_code) {
    __ lw(a3, FieldMemOperand(t0, JSFunction::kSharedFunctionInfoOffset));
    __ lw(a3, FieldMemOperand(a3, SharedFunctionInfo::kFunctionDataOffset));
    GetSharedFunctionInfoBytecode(masm, a3, a0);
    __ GetObjectType(a3, a3, a3);
    __ Assert(eq, AbortReason::kMissingBytecodeArray, a3,
              Operand(BYTECODE_ARRAY_TYPE));
  }

  // Resume (Ignition/TurboFan) generator object.
  {
    __ lw(a0, FieldMemOperand(t0, JSFunction::kSharedFunctionInfoOffset));
    __ lhu(a0, FieldMemOperand(
                   a0, SharedFunctionInfo::kFormalParameterCountOffset));
    // We abuse new.target both to indicate that this is a resume call and to
    // pass in the generator object. In ordinary calls, new.target is always
    // undefined because generator functions are non-constructable.
    __ Move(a3, a1);
    __ Move(a1, t0);
    static_assert(kJavaScriptCallCodeStartRegister == a2, "ABI mismatch");
    __ lw(a2, FieldMemOperand(a1, JSFunction::kCodeOffset));
    __ Addu(a2, a2, Code::kHeaderSize - kHeapObjectTag);
    __ Jump(a2);
  }

  __ bind(&prepare_step_in_if_stepping);
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ Push(a1, t0);
    // Push hole as receiver since we do not use it for stepping.
    __ PushRoot(RootIndex::kTheHoleValue);
    __ CallRuntime(Runtime::kDebugOnFunctionCall);
    __ Pop(a1);
  }
  __ Branch(USE_DELAY_SLOT, &stepping_prepared);
  __ lw(t0, FieldMemOperand(a1, JSGeneratorObject::kFunctionOffset));

  __ bind(&prepare_step_in_suspended_generator);
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ Push(a1);
    __ CallRuntime(Runtime::kDebugPrepareStepInSuspendedGenerator);
    __ Pop(a1);
  }
  __ Branch(USE_DELAY_SLOT, &stepping_prepared);
  __ lw(t0, FieldMemOperand(a1, JSGeneratorObject::kFunctionOffset));

  __ bind(&stack_overflow);
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ CallRuntime(Runtime::kThrowStackOverflow);
  }
}
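// Stores the optimized code in the closure's code field and records the
// store with the GC write barrier.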
static void ReplaceClosureCodeWithOptimizedCode(
    MacroAssembler* masm, Register optimized_code, Register closure,
    Register scratch1, Register scratch2, Register scratch3) {
  // Store code entry in the closure.
  __ sw(optimized_code, FieldMemOperand(closure, JSFunction::kCodeOffset));
  __ mov(scratch1, optimized_code);  // Write barrier clobbers scratch1 below.
  __ RecordWriteField(closure, JSFunction::kCodeOffset, scratch1, scratch2,
                      kRAHasNotBeenSaved, kDontSaveFPRegs, OMIT_REMEMBERED_SET,
                      OMIT_SMI_CHECK);
}
static void LeaveInterpreterFrame(MacroAssembler* masm, Register scratch) {
  Register args_count = scratch;

  // Get the arguments + receiver count.
  __ lw(args_count,
        MemOperand(fp, InterpreterFrameConstants::kBytecodeArrayFromFp));
  __ lw(args_count,
        FieldMemOperand(args_count, BytecodeArray::kParameterSizeOffset));

  // Leave the frame (also dropping the register file).
  __ LeaveFrame(StackFrame::INTERPRETED);

  // Drop receiver + arguments.
  __ Addu(sp, sp, args_count);
}
// Tail-call |function_id| if |smi_entry| == |marker|.
static void TailCallRuntimeIfMarkerEquals(MacroAssembler* masm,
                                          Register smi_entry,
                                          OptimizationMarker marker,
                                          Runtime::FunctionId function_id) {
  Label no_match;
  __ Branch(&no_match, ne, smi_entry, Operand(Smi::FromEnum(marker)));
  GenerateTailCallToReturnedCode(masm, function_id);
  __ bind(&no_match);
}
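// Inspects the optimized code slot in the feedback vector. A Smi slot is an
// optimization marker that is dispatched to the runtime; a weak reference is
// optimized code that is tail-called unless it is marked for deoptimization.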
static void MaybeTailCallOptimizedCodeSlot(MacroAssembler* masm,
                                           Register feedback_vector,
                                           Register scratch1, Register scratch2,
                                           Register scratch3) {
  DCHECK(
      !AreAliased(feedback_vector, a0, a1, a3, scratch1, scratch2, scratch3));

  Label optimized_code_slot_is_weak_ref, fallthrough;

  Register closure = a1;
  Register optimized_code_entry = scratch1;

  __ lw(optimized_code_entry,
        FieldMemOperand(feedback_vector, FeedbackVector::kOptimizedCodeOffset));

  // Check if the code entry is a Smi. If yes, we interpret it as an
  // optimization marker. Otherwise, interpret it as a weak reference to a
  // code object.
  __ JumpIfNotSmi(optimized_code_entry, &optimized_code_slot_is_weak_ref);

  {
    // Optimized code slot is a Smi optimization marker.

    // Fall through if no optimization trigger.
    __ Branch(&fallthrough, eq, optimized_code_entry,
              Operand(Smi::FromEnum(OptimizationMarker::kNone)));

    TailCallRuntimeIfMarkerEquals(masm, optimized_code_entry,
                                  OptimizationMarker::kLogFirstExecution,
                                  Runtime::kFunctionFirstExecution);
    TailCallRuntimeIfMarkerEquals(masm, optimized_code_entry,
                                  OptimizationMarker::kCompileOptimized,
                                  Runtime::kCompileOptimized_NotConcurrent);
    TailCallRuntimeIfMarkerEquals(
        masm, optimized_code_entry,
        OptimizationMarker::kCompileOptimizedConcurrent,
        Runtime::kCompileOptimized_Concurrent);

    {
      // Otherwise, the marker is InOptimizationQueue, so fall through hoping
      // that an interrupt will eventually update the slot with optimized code.
      if (FLAG_debug_code) {
        __ Assert(
            eq, AbortReason::kExpectedOptimizationSentinel,
            optimized_code_entry,
            Operand(Smi::FromEnum(OptimizationMarker::kInOptimizationQueue)));
      }
      __ jmp(&fallthrough);
    }
  }

  {
    // Optimized code slot is a weak reference.
    __ bind(&optimized_code_slot_is_weak_ref);

    __ LoadWeakValue(optimized_code_entry, optimized_code_entry, &fallthrough);

    // Check if the optimized code is marked for deopt. If it is, call the
    // runtime to clear it.
    Label found_deoptimized_code;
    __ lw(scratch2, FieldMemOperand(optimized_code_entry,
                                    Code::kCodeDataContainerOffset));
    __ lw(scratch2, FieldMemOperand(
                        scratch2, CodeDataContainer::kKindSpecificFlagsOffset));
    __ And(scratch2, scratch2, Operand(1 << Code::kMarkedForDeoptimizationBit));
    __ Branch(&found_deoptimized_code, ne, scratch2, Operand(zero_reg));

    // Optimized code is good, get it into the closure and tail-call it.
    ReplaceClosureCodeWithOptimizedCode(masm, optimized_code_entry, closure,
                                        scratch2, scratch3, feedback_vector);
    static_assert(kJavaScriptCallCodeStartRegister == a2, "ABI mismatch");
    __ Addu(a2, optimized_code_entry, Code::kHeaderSize - kHeapObjectTag);
    __ Jump(a2);

    // Optimized code slot contains deoptimized code; evict it and re-enter
    // the closure's code.
    __ bind(&found_deoptimized_code);
    GenerateTailCallToReturnedCode(masm, Runtime::kEvictOptimizedCodeSlot);
  }

  // Fall through if the optimized code slot is clear and there is no
  // optimization marker.
  __ bind(&fallthrough);
}
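// Advance the current bytecode offset. This simulates what all bytecode
// handlers do upon completion of the underlying operation. Will bail out to
// a label if the bytecode (without prefix) is a return bytecode.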
static void AdvanceBytecodeOffsetOrReturn(MacroAssembler* masm,
                                          Register bytecode_array,
                                          Register bytecode_offset,
                                          Register bytecode, Register scratch1,
                                          Register scratch2, Label* if_return) {
  Register bytecode_size_table = scratch1;
  DCHECK(!AreAliased(bytecode_array, bytecode_offset, bytecode_size_table,
                     bytecode));

  __ li(bytecode_size_table, ExternalReference::bytecode_size_table_address());

  // Check if the bytecode is a Wide or ExtraWide prefix bytecode.
  Label process_bytecode, extra_wide;
  STATIC_ASSERT(0 == static_cast<int>(interpreter::Bytecode::kWide));
  STATIC_ASSERT(1 == static_cast<int>(interpreter::Bytecode::kExtraWide));
  STATIC_ASSERT(2 == static_cast<int>(interpreter::Bytecode::kDebugBreakWide));
  STATIC_ASSERT(3 ==
                static_cast<int>(interpreter::Bytecode::kDebugBreakExtraWide));
  __ Branch(&process_bytecode, hi, bytecode, Operand(3));
  __ And(scratch2, bytecode, Operand(1));
  __ Branch(&extra_wide, ne, scratch2, Operand(zero_reg));

  // Load the next bytecode and update table to the wide scaled table.
  __ Addu(bytecode_offset, bytecode_offset, Operand(1));
  __ Addu(scratch2, bytecode_array, bytecode_offset);
  __ lbu(bytecode, MemOperand(scratch2));
  __ Addu(bytecode_size_table, bytecode_size_table,
          Operand(kIntSize * interpreter::Bytecodes::kBytecodeCount));
  __ jmp(&process_bytecode);

  __ bind(&extra_wide);
  // Load the next bytecode and update table to the extra wide scaled table.
  __ Addu(bytecode_offset, bytecode_offset, Operand(1));
  __ Addu(scratch2, bytecode_array, bytecode_offset);
  __ lbu(bytecode, MemOperand(scratch2));
  __ Addu(bytecode_size_table, bytecode_size_table,
          Operand(2 * kIntSize * interpreter::Bytecodes::kBytecodeCount));

  __ bind(&process_bytecode);

// Bail out to the return label if this is a return bytecode.
#define JUMP_IF_EQUAL(NAME)          \
  __ Branch(if_return, eq, bytecode, \
            Operand(static_cast<int>(interpreter::Bytecode::k##NAME)));
  RETURN_BYTECODE_LIST(JUMP_IF_EQUAL)
#undef JUMP_IF_EQUAL

  // Otherwise, load the size of the current bytecode and advance the offset.
  __ Lsa(scratch2, bytecode_size_table, bytecode, 2);
  __ lw(scratch2, MemOperand(scratch2));
  __ Addu(bytecode_offset, bytecode_offset, scratch2);
}
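// Generate code for entering a JS function with the interpreter. On entry,
// a1 holds the JS function being called, a3 the incoming new target or
// generator object and cp the context; the receiver and arguments have
// already been pushed on the stack.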
void Builtins::Generate_InterpreterEntryTrampoline(MacroAssembler* masm) {
  Register closure = a1;
  Register feedback_vector = a2;

  // Load the feedback vector from the closure.
  __ lw(feedback_vector,
        FieldMemOperand(closure, JSFunction::kFeedbackCellOffset));
  __ lw(feedback_vector, FieldMemOperand(feedback_vector, Cell::kValueOffset));

  // Read off the optimized code slot in the feedback vector, and if there
  // is optimized code or an optimization marker, call that instead.
  MaybeTailCallOptimizedCodeSlot(masm, feedback_vector, t0, t3, t1);

  // Open a frame scope to indicate that there is a frame on the stack. The
  // MANUAL indicates that the scope shouldn't actually generate code to set
  // up the frame (that is done below).
  FrameScope frame_scope(masm, StackFrame::MANUAL);
  __ PushStandardFrame(closure);

  // Get the bytecode array from the function object.
  __ lw(a0, FieldMemOperand(closure, JSFunction::kSharedFunctionInfoOffset));
  __ lw(kInterpreterBytecodeArrayRegister,
        FieldMemOperand(a0, SharedFunctionInfo::kFunctionDataOffset));
  GetSharedFunctionInfoBytecode(masm, kInterpreterBytecodeArrayRegister, t0);

  // Increment invocation count for the function.
  __ lw(t0, FieldMemOperand(feedback_vector,
                            FeedbackVector::kInvocationCountOffset));
  __ Addu(t0, t0, Operand(1));
  __ sw(t0, FieldMemOperand(feedback_vector,
                            FeedbackVector::kInvocationCountOffset));

  // Check that the function data field is actually a BytecodeArray object.
  if (FLAG_debug_code) {
    __ SmiTst(kInterpreterBytecodeArrayRegister, t0);
    __ Assert(ne,
              AbortReason::kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry,
              t0, Operand(zero_reg));
    __ GetObjectType(kInterpreterBytecodeArrayRegister, t0, t0);
    __ Assert(eq,
              AbortReason::kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry,
              t0, Operand(BYTECODE_ARRAY_TYPE));
  }

  // Reset code age.
  DCHECK_EQ(0, BytecodeArray::kNoAgeBytecodeAge);
  __ sb(zero_reg, FieldMemOperand(kInterpreterBytecodeArrayRegister,
                                  BytecodeArray::kBytecodeAgeOffset));

  // Load initial bytecode offset.
  __ li(kInterpreterBytecodeOffsetRegister,
        Operand(BytecodeArray::kHeaderSize - kHeapObjectTag));

  // Push bytecode array and Smi-tagged bytecode array offset.
  __ SmiTag(t0, kInterpreterBytecodeOffsetRegister);
  __ Push(kInterpreterBytecodeArrayRegister, t0);

  // Allocate the local and temporary register file on the stack.
  {
    // Load frame size from the BytecodeArray object.
    __ lw(t0, FieldMemOperand(kInterpreterBytecodeArrayRegister,
                              BytecodeArray::kFrameSizeOffset));

    // Do a stack check to ensure we don't go over the limit.
    Label ok;
    __ Subu(t1, sp, Operand(t0));
    __ LoadRoot(a2, RootIndex::kRealStackLimit);
    __ Branch(&ok, hs, t1, Operand(a2));
    __ CallRuntime(Runtime::kThrowStackOverflow);
    __ bind(&ok);

    // If ok, push undefined as the initial value for all register file
    // entries.
    Label loop_header, loop_check;
    __ LoadRoot(t1, RootIndex::kUndefinedValue);
    __ Branch(&loop_check);
    __ bind(&loop_header);
    __ push(t1);
    // Continue loop if not done.
    __ bind(&loop_check);
    __ Subu(t0, t0, Operand(kPointerSize));
    __ Branch(&loop_header, ge, t0, Operand(zero_reg));
  }

  // If the bytecode array has a valid incoming new target or generator object
  // register, initialize it with the incoming value which was passed in a3.
  Label no_incoming_new_target_or_generator_register;
  __ lw(t1, FieldMemOperand(
                kInterpreterBytecodeArrayRegister,
                BytecodeArray::kIncomingNewTargetOrGeneratorRegisterOffset));
  __ Branch(&no_incoming_new_target_or_generator_register, eq, t1,
            Operand(zero_reg));
  __ Lsa(t1, fp, t1, kPointerSizeLog2);
  __ sw(a3, MemOperand(t1));
  __ bind(&no_incoming_new_target_or_generator_register);

  // Load accumulator with undefined.
  __ LoadRoot(kInterpreterAccumulatorRegister, RootIndex::kUndefinedValue);

  // Load the dispatch table into a register and dispatch to the bytecode
  // handler at the current bytecode offset.
  Label do_dispatch;
  __ bind(&do_dispatch);
  __ li(kInterpreterDispatchTableRegister,
        ExternalReference::interpreter_dispatch_table_address(masm->isolate()));
  __ Addu(a0, kInterpreterBytecodeArrayRegister,
          kInterpreterBytecodeOffsetRegister);
  __ lbu(t3, MemOperand(a0));
  __ Lsa(kScratchReg, kInterpreterDispatchTableRegister, t3, kPointerSizeLog2);
  __ lw(kJavaScriptCallCodeStartRegister, MemOperand(kScratchReg));
  __ Call(kJavaScriptCallCodeStartRegister);
  masm->isolate()->heap()->SetInterpreterEntryReturnPCOffset(masm->pc_offset());

  // Any returns to the entry trampoline are either due to the return bytecode
  // or the interpreter tail-calling a builtin and then a dispatch.

  // Get bytecode array and bytecode offset from the stack frame.
  __ lw(kInterpreterBytecodeArrayRegister,
        MemOperand(fp, InterpreterFrameConstants::kBytecodeArrayFromFp));
  __ lw(kInterpreterBytecodeOffsetRegister,
        MemOperand(fp, InterpreterFrameConstants::kBytecodeOffsetFromFp));
  __ SmiUntag(kInterpreterBytecodeOffsetRegister);

  // Either return, or advance to the next bytecode and dispatch.
  Label do_return;
  __ Addu(a1, kInterpreterBytecodeArrayRegister,
          kInterpreterBytecodeOffsetRegister);
  __ lbu(a1, MemOperand(a1));
  AdvanceBytecodeOffsetOrReturn(masm, kInterpreterBytecodeArrayRegister,
                                kInterpreterBytecodeOffsetRegister, a1, a2, a3,
                                &do_return);
  __ jmp(&do_dispatch);

  __ bind(&do_return);
  // The return value is in v0.
  LeaveInterpreterFrame(masm, t0);
  __ Jump(ra);
}
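// Pushes |num_args| arguments onto the stack, walking downwards in memory
// from |index| (the address of the first argument).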
static void Generate_InterpreterPushArgs(MacroAssembler* masm,
                                         Register num_args, Register index,
                                         Register scratch, Register scratch2) {
  // Find the address of the last argument.
  __ mov(scratch2, num_args);
  __ sll(scratch2, scratch2, kPointerSizeLog2);
  __ Subu(scratch2, index, Operand(scratch2));

  // Push the arguments.
  Label loop_header, loop_check;
  __ Branch(&loop_check);
  __ bind(&loop_header);
  __ lw(scratch, MemOperand(index));
  __ Addu(index, index, Operand(-kPointerSize));
  __ push(scratch);
  __ bind(&loop_check);
  __ Branch(&loop_header, gt, index, Operand(scratch2));
}
void Builtins::Generate_InterpreterPushArgsThenCallImpl(
    MacroAssembler* masm, ConvertReceiverMode receiver_mode,
    InterpreterPushArgsMode mode) {
  DCHECK(mode != InterpreterPushArgsMode::kArrayFunction);
  // a0 holds the argument count (not including the receiver), a1 the target
  // to call and a2 the address of the first argument to be pushed.
  Label stack_overflow;

  __ Addu(t0, a0, Operand(1));  // Add one for receiver.

  Generate_StackOverflowCheck(masm, t0, t4, t1, &stack_overflow);

  // Push "undefined" as the receiver arg if we need to.
  if (receiver_mode == ConvertReceiverMode::kNullOrUndefined) {
    __ PushRoot(RootIndex::kUndefinedValue);
    __ mov(t0, a0);  // No receiver to push.
  }

  // This function modifies a2, t4 and t1.
  Generate_InterpreterPushArgs(masm, t0, a2, t4, t1);

  if (mode == InterpreterPushArgsMode::kWithFinalSpread) {
    __ Pop(a2);                   // Pass the spread in a register.
    __ Subu(a0, a0, Operand(1));  // Subtract one for spread.
  }

  // Call the target.
  if (mode == InterpreterPushArgsMode::kWithFinalSpread) {
    __ Jump(BUILTIN_CODE(masm->isolate(), CallWithSpread),
            RelocInfo::CODE_TARGET);
  } else {
    __ Jump(masm->isolate()->builtins()->Call(ConvertReceiverMode::kAny),
            RelocInfo::CODE_TARGET);
  }

  __ bind(&stack_overflow);
  __ TailCallRuntime(Runtime::kThrowStackOverflow);
}
void Builtins::Generate_InterpreterPushArgsThenConstructImpl(
    MacroAssembler* masm, InterpreterPushArgsMode mode) {
  // a0 holds the argument count, a1 the constructor, a3 the new target,
  // a2 the allocation site feedback (or undefined) and t4 the address of
  // the first argument.
  Label stack_overflow;

  // Push a slot for the receiver.
  __ push(zero_reg);

  Generate_StackOverflowCheck(masm, a0, t1, t0, &stack_overflow);

  // This function modifies t4, t1 and t0.
  Generate_InterpreterPushArgs(masm, a0, t4, t1, t0);

  if (mode == InterpreterPushArgsMode::kWithFinalSpread) {
    __ Pop(a2);                   // Pass the spread in a register.
    __ Subu(a0, a0, Operand(1));  // Subtract one for spread.
  } else {
    __ AssertUndefinedOrAllocationSite(a2, t0);
  }

  if (mode == InterpreterPushArgsMode::kArrayFunction) {
    __ AssertFunction(a1);

    // Tail call to the array construct stub (still in the caller context at
    // this point).
    __ Jump(BUILTIN_CODE(masm->isolate(), ArrayConstructorImpl),
            RelocInfo::CODE_TARGET);
  } else if (mode == InterpreterPushArgsMode::kWithFinalSpread) {
    // Call the constructor with a0, a1, and a3 unmodified.
    __ Jump(BUILTIN_CODE(masm->isolate(), ConstructWithSpread),
            RelocInfo::CODE_TARGET);
  } else {
    DCHECK_EQ(InterpreterPushArgsMode::kOther, mode);
    // Call the constructor with a0, a1, and a3 unmodified.
    __ Jump(BUILTIN_CODE(masm->isolate(), Construct), RelocInfo::CODE_TARGET);
  }

  __ bind(&stack_overflow);
  __ TailCallRuntime(Runtime::kThrowStackOverflow);
}
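// Re-enters the interpreter: computes the return address into the
// interpreter entry trampoline (or a function-specific copy of it), loads
// the dispatch table, and jumps to the handler for the current bytecode.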
static void Generate_InterpreterEnterBytecode(MacroAssembler* masm) {
  // Set the return address to the correct point in the interpreter entry
  // trampoline.
  Label builtin_trampoline, trampoline_loaded;
  Smi interpreter_entry_return_pc_offset(
      masm->isolate()->heap()->interpreter_entry_return_pc_offset());
  DCHECK_NE(interpreter_entry_return_pc_offset, Smi::zero());

  // If the SFI function_data is an InterpreterData, the function will have a
  // custom copy of the interpreter entry trampoline for profiling. If so,
  // get the custom trampoline, otherwise grab the entry address of the global
  // trampoline.
  __ lw(t0, MemOperand(fp, StandardFrameConstants::kFunctionOffset));
  __ lw(t0, FieldMemOperand(t0, JSFunction::kSharedFunctionInfoOffset));
  __ lw(t0, FieldMemOperand(t0, SharedFunctionInfo::kFunctionDataOffset));
  __ GetObjectType(t0, kInterpreterDispatchTableRegister,
                   kInterpreterDispatchTableRegister);
  __ Branch(&builtin_trampoline, ne, kInterpreterDispatchTableRegister,
            Operand(INTERPRETER_DATA_TYPE));

  __ lw(t0, FieldMemOperand(t0, InterpreterData::kInterpreterTrampolineOffset));
  __ Addu(t0, t0, Operand(Code::kHeaderSize - kHeapObjectTag));
  __ Branch(&trampoline_loaded);

  __ bind(&builtin_trampoline);
  __ li(t0, ExternalReference::
                address_of_interpreter_entry_trampoline_instruction_start(
                    masm->isolate()));
  __ lw(t0, MemOperand(t0));

  __ bind(&trampoline_loaded);
  __ Addu(ra, t0, Operand(interpreter_entry_return_pc_offset->value()));

  // Initialize the dispatch table register.
  __ li(kInterpreterDispatchTableRegister,
        ExternalReference::interpreter_dispatch_table_address(masm->isolate()));

  // Get the bytecode array pointer from the frame.
  __ lw(kInterpreterBytecodeArrayRegister,
        MemOperand(fp, InterpreterFrameConstants::kBytecodeArrayFromFp));

  if (FLAG_debug_code) {
    // Check that the function data field is actually a BytecodeArray object.
    __ SmiTst(kInterpreterBytecodeArrayRegister, kScratchReg);
    __ Assert(ne,
              AbortReason::kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry,
              kScratchReg, Operand(zero_reg));
    __ GetObjectType(kInterpreterBytecodeArrayRegister, a1, a1);
    __ Assert(eq,
              AbortReason::kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry,
              a1, Operand(BYTECODE_ARRAY_TYPE));
  }

  // Get the target bytecode offset from the frame.
  __ lw(kInterpreterBytecodeOffsetRegister,
        MemOperand(fp, InterpreterFrameConstants::kBytecodeOffsetFromFp));
  __ SmiUntag(kInterpreterBytecodeOffsetRegister);

  // Dispatch to the target bytecode.
  __ Addu(a1, kInterpreterBytecodeArrayRegister,
          kInterpreterBytecodeOffsetRegister);
  __ lbu(t3, MemOperand(a1));
  __ Lsa(a1, kInterpreterDispatchTableRegister, t3, kPointerSizeLog2);
  __ lw(kJavaScriptCallCodeStartRegister, MemOperand(a1));
  __ Jump(kJavaScriptCallCodeStartRegister);
}
void Builtins::Generate_InterpreterEnterBytecodeAdvance(MacroAssembler* masm) {
  // Get bytecode array and bytecode offset from the stack frame.
  __ lw(kInterpreterBytecodeArrayRegister,
        MemOperand(fp, InterpreterFrameConstants::kBytecodeArrayFromFp));
  __ lw(kInterpreterBytecodeOffsetRegister,
        MemOperand(fp, InterpreterFrameConstants::kBytecodeOffsetFromFp));
  __ SmiUntag(kInterpreterBytecodeOffsetRegister);

  // Load the current bytecode.
  __ Addu(a1, kInterpreterBytecodeArrayRegister,
          kInterpreterBytecodeOffsetRegister);
  __ lbu(a1, MemOperand(a1));

  // Advance to the next bytecode.
  Label if_return;
  AdvanceBytecodeOffsetOrReturn(masm, kInterpreterBytecodeArrayRegister,
                                kInterpreterBytecodeOffsetRegister, a1, a2, a3,
                                &if_return);

  // Convert the new bytecode offset to a Smi and save it in the stack frame.
  __ SmiTag(a2, kInterpreterBytecodeOffsetRegister);
  __ sw(a2, MemOperand(fp, InterpreterFrameConstants::kBytecodeOffsetFromFp));

  Generate_InterpreterEnterBytecode(masm);

  // We should never take the if_return path.
  __ bind(&if_return);
  __ Abort(AbortReason::kInvalidBytecodeAdvance);
}

void Builtins::Generate_InterpreterEnterBytecodeDispatch(MacroAssembler* masm) {
  Generate_InterpreterEnterBytecode(masm);
}
void Builtins::Generate_InstantiateAsmJs(MacroAssembler* masm) {
  // a0 holds the argument count, a1 the new target and a3 the target
  // function; all three are preserved for the callee.
  Label failed;
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    // Preserve the argument count for a later compare.
    __ Move(t4, a0);
    // Push a copy of the target function and the new target; push the
    // function again as a parameter to the runtime call.
    __ SmiTag(a0);
    __ Push(a0, a1, a3, a1);

    // Copy arguments from caller (stdlib, foreign, heap).
    Label args_done;
    for (int j = 0; j < 4; ++j) {
      Label over;
      if (j < 3) {
        __ Branch(&over, ne, t4, Operand(j));
      }
      for (int i = j - 1; i >= 0; --i) {
        __ lw(t4, MemOperand(fp, StandardFrameConstants::kCallerSPOffset +
                                     i * kPointerSize));
        __ push(t4);
      }
      for (int i = 0; i < 3 - j; ++i) {
        __ PushRoot(RootIndex::kUndefinedValue);
      }
      if (j < 3) {
        __ jmp(&args_done);
        __ bind(&over);
      }
    }
    __ bind(&args_done);

    // Call runtime; on success unwind this frame and the parent frame.
    __ CallRuntime(Runtime::kInstantiateAsmJs, 4);

    // A smi 0 is returned on failure, an object on success.
    __ JumpIfSmi(v0, &failed);

    __ Drop(2);
    __ pop(t4);
    __ SmiUntag(t4);
    scope.GenerateLeaveFrame();

    __ Addu(t4, t4, Operand(1));
    __ Lsa(sp, sp, t4, kPointerSizeLog2);
    __ Ret();

    __ bind(&failed);
    // Restore target function and new target.
    __ Pop(a0, a1, a3);
    __ SmiUntag(a0);
  }
  // On failure, tail call back to regular JS by re-calling the function,
  // which has been reset to the compile-lazy builtin.
  static_assert(kJavaScriptCallCodeStartRegister == a2, "ABI mismatch");
  __ lw(a2, FieldMemOperand(a1, JSFunction::kCodeOffset));
  __ Addu(a2, a2, Code::kHeaderSize - kHeapObjectTag);
  __ Jump(a2);
}
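// Restores the register state saved in a builtin continuation frame (as laid
// out by the deoptimizer) and re-enters the builtin stored in the frame.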
namespace {

void Generate_ContinueToBuiltinHelper(MacroAssembler* masm,
                                      bool java_script_builtin,
                                      bool with_result) {
  const RegisterConfiguration* config(RegisterConfiguration::Default());
  int allocatable_register_count = config->num_allocatable_general_registers();
  if (with_result) {
    // Overwrite the hole inserted by the deoptimizer with the return value
    // from the LAZY deopt point.
    __ sw(v0,
          MemOperand(
              sp, config->num_allocatable_general_registers() * kPointerSize +
                      BuiltinContinuationFrameConstants::kFixedFrameSize));
  }
  for (int i = allocatable_register_count - 1; i >= 0; --i) {
    int code = config->GetAllocatableGeneralCode(i);
    __ Pop(Register::from_code(code));
    if (java_script_builtin && code == kJavaScriptCallArgCountRegister.code()) {
      __ SmiUntag(Register::from_code(code));
    }
  }
  __ lw(fp, MemOperand(
                sp, BuiltinContinuationFrameConstants::kFixedFrameSizeFromFp));
  __ Pop(t0);
  __ Addu(sp, sp,
          Operand(BuiltinContinuationFrameConstants::kFixedFrameSizeFromFp));
  __ Pop(ra);
  __ Addu(t0, t0, Operand(Code::kHeaderSize - kHeapObjectTag));
  __ Jump(t0);
}

}  // namespace
void Builtins::Generate_ContinueToCodeStubBuiltin(MacroAssembler* masm) {
  Generate_ContinueToBuiltinHelper(masm, false, false);
}

void Builtins::Generate_ContinueToCodeStubBuiltinWithResult(
    MacroAssembler* masm) {
  Generate_ContinueToBuiltinHelper(masm, false, true);
}

void Builtins::Generate_ContinueToJavaScriptBuiltin(MacroAssembler* masm) {
  Generate_ContinueToBuiltinHelper(masm, true, false);
}

void Builtins::Generate_ContinueToJavaScriptBuiltinWithResult(
    MacroAssembler* masm) {
  Generate_ContinueToBuiltinHelper(masm, true, true);
}
void Builtins::Generate_NotifyDeoptimized(MacroAssembler* masm) {
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ CallRuntime(Runtime::kNotifyDeoptimized);
  }

  DCHECK_EQ(kInterpreterAccumulatorRegister.code(), v0.code());
  __ lw(v0, MemOperand(sp, 0 * kPointerSize));
  __ Ret(USE_DELAY_SLOT);
  // Safe to fill the delay slot: Addu emits one instruction.
  __ Addu(sp, sp, Operand(1 * kPointerSize));  // Remove accumulator.
}
void Builtins::Generate_InterpreterOnStackReplacement(MacroAssembler* masm) {
  // Look up the function in the JavaScript frame.
  __ lw(a0, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
  __ lw(a0, MemOperand(a0, JavaScriptFrameConstants::kFunctionOffset));

  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    // Pass function as argument.
    __ push(a0);
    __ CallRuntime(Runtime::kCompileForOnStackReplacement);
  }

  // If the code object is null, just return to the caller.
  __ Ret(eq, v0, Operand(Smi::zero()));

  // Drop the handler frame that is sitting on top of the actual JavaScript
  // frame. This is the case when OSR is triggered from bytecode.
  __ LeaveFrame(StackFrame::STUB);

  // Load deoptimization data from the code object:
  // <deopt_data> = <code>[#deoptimization_data_offset]
  __ lw(a1, MemOperand(v0, Code::kDeoptimizationDataOffset - kHeapObjectTag));

  // Load the OSR entrypoint offset from the deoptimization data:
  // <osr_offset> = <deopt_data>[#header_size + #osr_pc_offset]
  __ lw(a1, MemOperand(a1, FixedArray::OffsetOfElementAt(
                               DeoptimizationData::kOsrPcOffsetIndex) -
                               kHeapObjectTag));
  __ SmiUntag(a1);

  // Compute the target address = code start + osr_offset.
  __ Addu(v0, v0, a1);
  __ addiu(ra, v0, Code::kHeaderSize - kHeapObjectTag);

  // And "return" to the OSR entry point of the function.
  __ Ret();
}
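// ES6 section 19.2.3.1 Function.prototype.apply.
// ----------- S t a t e -------------
//  -- a0    : argc
//  -- sp[0] : argArray
//  -- sp[4] : thisArg
//  -- sp[8] : receiver
// -----------------------------------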
void Builtins::Generate_FunctionPrototypeApply(MacroAssembler* masm) {
  // 1. Load receiver into a1, argArray into a2 (if present), remove all
  // arguments from the stack (including the receiver), and push thisArg (if
  // present) instead.
  {
    Label no_arg;
    Register scratch = t0;
    __ LoadRoot(a2, RootIndex::kUndefinedValue);
    __ mov(a3, a2);
    // Lsa() cannot be used here as the scratch value is used later.
    __ sll(scratch, a0, kPointerSizeLog2);
    __ Addu(a0, sp, Operand(scratch));
    __ lw(a1, MemOperand(a0));  // receiver
    __ Subu(a0, a0, Operand(kPointerSize));
    __ Branch(&no_arg, lt, a0, Operand(sp));
    __ lw(a2, MemOperand(a0));  // thisArg
    __ Subu(a0, a0, Operand(kPointerSize));
    __ Branch(&no_arg, lt, a0, Operand(sp));
    __ lw(a3, MemOperand(a0));  // argArray
    __ bind(&no_arg);
    __ Addu(sp, sp, Operand(scratch));
    __ sw(a2, MemOperand(sp));
    __ mov(a2, a3);
  }

  // 2. We don't need to check explicitly for a callable receiver here,
  // since that's the first thing the Call/CallWithArrayLike builtins will do.

  // 3. Tail call with no arguments if argArray is null or undefined.
  Label no_arguments;
  __ JumpIfRoot(a2, RootIndex::kNullValue, &no_arguments);
  __ JumpIfRoot(a2, RootIndex::kUndefinedValue, &no_arguments);

  // 4. Apply the receiver to the given argArray.
  __ Jump(BUILTIN_CODE(masm->isolate(), CallWithArrayLike),
          RelocInfo::CODE_TARGET);

  // 4b. The argArray is either null or undefined, so we tail call without
  // any arguments to the receiver.
  __ bind(&no_arguments);
  {
    __ mov(a0, zero_reg);
    __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
  }
}
void Builtins::Generate_FunctionPrototypeCall(MacroAssembler* masm) {
  // 1. Make sure we have at least one argument (a0 holds the actual count).
  {
    Label done;
    __ Branch(&done, ne, a0, Operand(zero_reg));
    __ PushRoot(RootIndex::kUndefinedValue);
    __ Addu(a0, a0, Operand(1));
    __ bind(&done);
  }

  // 2. Get the function to call (passed as receiver) from the stack.
  __ Lsa(kScratchReg, sp, a0, kPointerSizeLog2);
  __ lw(a1, MemOperand(kScratchReg));

  // 3. Shift arguments and return address one slot down on the stack
  //    (overwriting the original receiver). Adjust the argument count to
  //    make the original first argument the new receiver.
  {
    Label loop;
    // Calculate the copy start address (destination). The copy end is sp.
    __ Lsa(a2, sp, a0, kPointerSizeLog2);

    __ bind(&loop);
    __ lw(kScratchReg, MemOperand(a2, -kPointerSize));
    __ sw(kScratchReg, MemOperand(a2));
    __ Subu(a2, a2, Operand(kPointerSize));
    __ Branch(&loop, ne, a2, Operand(sp));
    // Adjust the actual number of arguments and remove the top element
    // (which is a copy of the last argument).
    __ Subu(a0, a0, Operand(1));
    __ Pop();
  }

  // 4. Call the callable.
  __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
}
void Builtins::Generate_ReflectApply(MacroAssembler* masm) {
  // 1. Load target into a1 (if present), argumentsList into a2 (if present),
  // remove all arguments from the stack (including the receiver), and push
  // thisArgument (if present) instead.
  {
    Label no_arg;
    Register scratch = t0;
    __ LoadRoot(a1, RootIndex::kUndefinedValue);
    __ mov(a2, a1);
    __ mov(a3, a1);
    __ sll(scratch, a0, kPointerSizeLog2);
    __ mov(a0, scratch);
    __ Subu(a0, a0, Operand(kPointerSize));
    __ Branch(&no_arg, lt, a0, Operand(zero_reg));
    __ Addu(a0, sp, Operand(a0));
    __ lw(a1, MemOperand(a0));  // target
    __ Subu(a0, a0, Operand(kPointerSize));
    __ Branch(&no_arg, lt, a0, Operand(sp));
    __ lw(a2, MemOperand(a0));  // thisArgument
    __ Subu(a0, a0, Operand(kPointerSize));
    __ Branch(&no_arg, lt, a0, Operand(sp));
    __ lw(a3, MemOperand(a0));  // argumentsList
    __ bind(&no_arg);
    __ Addu(sp, sp, Operand(scratch));
    __ sw(a2, MemOperand(sp));
    __ mov(a2, a3);
  }

  // 2. We don't need to check explicitly for a callable target here,
  // since that's the first thing the Call/CallWithArrayLike builtins will do.

  // 3. Apply the target to the given argumentsList.
  __ Jump(BUILTIN_CODE(masm->isolate(), CallWithArrayLike),
          RelocInfo::CODE_TARGET);
}
void Builtins::Generate_ReflectConstruct(MacroAssembler* masm) {
  // 1. Load target into a1 (if present), argumentsList into a2 (if present),
  // new.target into a3 (if present, otherwise use target), remove all
  // arguments from the stack (including the receiver), and push undefined as
  // the receiver.
  {
    Label no_arg;
    Register scratch = t0;
    __ LoadRoot(a1, RootIndex::kUndefinedValue);
    __ mov(a2, a1);
    // Lsa() cannot be used here as the scratch value is used later.
    __ sll(scratch, a0, kPointerSizeLog2);
    __ Addu(a0, sp, Operand(scratch));
    __ sw(a2, MemOperand(a0));  // receiver
    __ Subu(a0, a0, Operand(kPointerSize));
    __ Branch(&no_arg, lt, a0, Operand(sp));
    __ lw(a1, MemOperand(a0));  // target
    __ mov(a3, a1);             // new.target defaults to target
    __ Subu(a0, a0, Operand(kPointerSize));
    __ Branch(&no_arg, lt, a0, Operand(sp));
    __ lw(a2, MemOperand(a0));  // argumentsList
    __ Subu(a0, a0, Operand(kPointerSize));
    __ Branch(&no_arg, lt, a0, Operand(sp));
    __ lw(a3, MemOperand(a0));  // new.target
    __ bind(&no_arg);
    __ Addu(sp, sp, Operand(scratch));
  }

  // 2. The Construct/ConstructWithArrayLike builtins check for a
  // constructor target and a constructor new.target themselves, so no
  // explicit checks are needed here.

  // 3. Construct the target with the given new.target and argumentsList.
  __ Jump(BUILTIN_CODE(masm->isolate(), ConstructWithArrayLike),
          RelocInfo::CODE_TARGET);
}
static void EnterArgumentsAdaptorFrame(MacroAssembler* masm) {
  __ sll(a0, a0, kSmiTagSize);
  __ li(t0, Operand(StackFrame::TypeToMarker(StackFrame::ARGUMENTS_ADAPTOR)));
  __ MultiPush(a0.bit() | a1.bit() | t0.bit() | fp.bit() | ra.bit());
  __ Push(Smi::zero());  // Padding.
  __ Addu(fp, sp,
          Operand(ArgumentsAdaptorFrameConstants::kFixedFrameSizeFromFp));
}

static void LeaveArgumentsAdaptorFrame(MacroAssembler* masm) {
  // Get the number of arguments passed (as a smi), tear down the frame and
  // then tear down the parameters. The result (v0) is passed through.
  __ lw(a1, MemOperand(fp, ArgumentsAdaptorFrameConstants::kLengthOffset));
  __ mov(sp, fp);
  __ MultiPop(fp.bit() | ra.bit());
  __ Lsa(sp, sp, a1, kPointerSizeLog2 - kSmiTagSize);
  // Adjust for the receiver.
  __ Addu(sp, sp, Operand(kPointerSize));
}
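// ----------- S t a t e -------------
//  -- a1 : target
//  -- a0 : number of parameters on the stack (not including the receiver)
//  -- a2 : arguments list (a FixedArray)
//  -- t0 : len (number of elements to push from the arguments list)
//  -- a3 : new.target (for [[Construct]])
// -----------------------------------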
void Builtins::Generate_CallOrConstructVarargs(MacroAssembler* masm,
                                               Handle<Code> code) {
  if (masm->emit_debug_code()) {
    // Allow a2 to be a FixedArray, or a FixedDoubleArray if t0 == 0.
    Label ok, fail;
    __ AssertNotSmi(a2);
    __ GetObjectType(a2, t8, t8);
    __ Branch(&ok, eq, t8, Operand(FIXED_ARRAY_TYPE));
    __ Branch(&fail, ne, t8, Operand(FIXED_DOUBLE_ARRAY_TYPE));
    __ Branch(&ok, eq, t0, Operand(0));
    // Fall through.
    __ bind(&fail);
    __ Abort(AbortReason::kOperandIsNotAFixedArray);

    __ bind(&ok);
  }

  // Check for stack overflow.
  Label stack_overflow;
  Generate_StackOverflowCheck(masm, t0, kScratchReg, t1, &stack_overflow);

  // Push arguments onto the stack (thisArgument is already on the stack),
  // replacing any holes with undefined.
  {
    __ mov(t2, zero_reg);
    Label done, push, loop;
    __ LoadRoot(t1, RootIndex::kTheHoleValue);
    __ bind(&loop);
    __ Branch(&done, eq, t2, Operand(t0));
    __ Lsa(kScratchReg, a2, t2, kPointerSizeLog2);
    __ lw(kScratchReg, FieldMemOperand(kScratchReg, FixedArray::kHeaderSize));
    __ Branch(&push, ne, t1, Operand(kScratchReg));
    __ LoadRoot(kScratchReg, RootIndex::kUndefinedValue);
    __ bind(&push);
    __ Push(kScratchReg);
    __ Addu(t2, t2, Operand(1));
    __ Branch(&loop);
    __ bind(&done);
    __ Addu(a0, a0, t2);
  }

  // Tail-call to the actual Call or Construct builtin.
  __ Jump(code, RelocInfo::CODE_TARGET);

  __ bind(&stack_overflow);
  __ TailCallRuntime(Runtime::kThrowStackOverflow);
}
void Builtins::Generate_CallOrConstructForwardVarargs(MacroAssembler* masm,
                                                      CallOrConstructMode mode,
                                                      Handle<Code> code) {
  // a0 holds the argument count, a1 the target, a2 the start index (to
  // support rest parameters) and a3 the new.target (for [[Construct]]).

  // Check if new.target has a [[Construct]] internal method.
  if (mode == CallOrConstructMode::kConstruct) {
    Label new_target_constructor, new_target_not_constructor;
    __ JumpIfSmi(a3, &new_target_not_constructor);
    __ lw(t1, FieldMemOperand(a3, HeapObject::kMapOffset));
    __ lbu(t1, FieldMemOperand(t1, Map::kBitFieldOffset));
    __ And(t1, t1, Operand(Map::IsConstructorBit::kMask));
    __ Branch(&new_target_constructor, ne, t1, Operand(zero_reg));
    __ bind(&new_target_not_constructor);
    {
      FrameScope scope(masm, StackFrame::MANUAL);
      __ EnterFrame(StackFrame::INTERNAL);
      __ Push(a3);
      __ CallRuntime(Runtime::kThrowNotConstructor);
    }
    __ bind(&new_target_constructor);
  }

  // Check if we have an arguments adaptor frame below the function frame.
  Label arguments_adaptor, arguments_done;
  __ lw(t3, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
  __ lw(t2, MemOperand(t3, CommonFrameConstants::kContextOrFrameTypeOffset));
  __ Branch(&arguments_adaptor, eq, t2,
            Operand(StackFrame::TypeToMarker(StackFrame::ARGUMENTS_ADAPTOR)));
  {
    __ lw(t2, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
    __ lw(t2, FieldMemOperand(t2, JSFunction::kSharedFunctionInfoOffset));
    __ lhu(t2, FieldMemOperand(
                   t2, SharedFunctionInfo::kFormalParameterCountOffset));
    __ mov(t3, fp);
  }
  __ Branch(&arguments_done);
  __ bind(&arguments_adaptor);
  {
    // Just get the length from the ArgumentsAdaptorFrame.
    __ lw(t2, MemOperand(t3, ArgumentsAdaptorFrameConstants::kLengthOffset));
    __ SmiUntag(t2);
  }
  __ bind(&arguments_done);

  Label stack_done, stack_overflow;
  __ Subu(t2, t2, a2);
  __ Branch(&stack_done, le, t2, Operand(zero_reg));
  {
    // Check for stack overflow.
    Generate_StackOverflowCheck(masm, t2, t0, t1, &stack_overflow);

    // Forward the arguments from the caller frame.
    {
      Label loop;
      __ Addu(a0, a0, t2);
      __ bind(&loop);
      {
        __ Lsa(kScratchReg, t3, t2, kPointerSizeLog2);
        __ lw(kScratchReg, MemOperand(kScratchReg, 1 * kPointerSize));
        __ push(kScratchReg);
        __ Subu(t2, t2, Operand(1));
        __ Branch(&loop, ne, t2, Operand(zero_reg));
      }
    }
  }
  __ Branch(&stack_done);
  __ bind(&stack_overflow);
  __ TailCallRuntime(Runtime::kThrowStackOverflow);
  __ bind(&stack_done);

  // Tail-call to the {code} handler.
  __ Jump(code, RelocInfo::CODE_TARGET);
}
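// ES6 section 9.2.1 [[Call]] ( thisArgument, argumentsList)
// ----------- S t a t e -------------
//  -- a0 : the number of arguments (not including the receiver)
//  -- a1 : the function to call (checked to be a JSFunction)
// -----------------------------------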
void Builtins::Generate_CallFunction(MacroAssembler* masm,
                                     ConvertReceiverMode mode) {
  __ AssertFunction(a1);

  // Check that the function is not a "classConstructor".
  Label class_constructor;
  __ lw(a2, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset));
  __ lw(a3, FieldMemOperand(a2, SharedFunctionInfo::kFlagsOffset));
  __ And(kScratchReg, a3,
         Operand(SharedFunctionInfo::IsClassConstructorBit::kMask));
  __ Branch(&class_constructor, ne, kScratchReg, Operand(zero_reg));

  // Enter the context of the function; ToObject has to run in the function
  // context, and we also need to take the global proxy from the function
  // context in case of conversion.
  __ lw(cp, FieldMemOperand(a1, JSFunction::kContextOffset));
  // We need to convert the receiver for non-native sloppy mode functions.
  Label done_convert;
  __ lw(a3, FieldMemOperand(a2, SharedFunctionInfo::kFlagsOffset));
  __ And(kScratchReg, a3,
         Operand(SharedFunctionInfo::IsNativeBit::kMask |
                 SharedFunctionInfo::IsStrictBit::kMask));
  __ Branch(&done_convert, ne, kScratchReg, Operand(zero_reg));
  {
    if (mode == ConvertReceiverMode::kNullOrUndefined) {
      // Patch receiver to global proxy.
      __ LoadGlobalProxy(a3);
    } else {
      Label convert_to_object, convert_receiver;
      __ Lsa(kScratchReg, sp, a0, kPointerSizeLog2);
      __ lw(a3, MemOperand(kScratchReg));
      __ JumpIfSmi(a3, &convert_to_object);
      STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
      __ GetObjectType(a3, t0, t0);
      __ Branch(&done_convert, hs, t0, Operand(FIRST_JS_RECEIVER_TYPE));
      if (mode != ConvertReceiverMode::kNotNullOrUndefined) {
        Label convert_global_proxy;
        __ JumpIfRoot(a3, RootIndex::kUndefinedValue, &convert_global_proxy);
        __ JumpIfNotRoot(a3, RootIndex::kNullValue, &convert_to_object);
        __ bind(&convert_global_proxy);
        {
          // Patch receiver to global proxy.
          __ LoadGlobalProxy(a3);
        }
        __ Branch(&convert_receiver);
      }
      __ bind(&convert_to_object);
      {
        // Convert receiver using ToObject.
        FrameScope scope(masm, StackFrame::INTERNAL);
        __ sll(a0, a0, kSmiTagSize);  // Smi tagged.
        __ Push(a0, a1);
        __ mov(a0, a3);
        __ Push(cp);
        __ Call(BUILTIN_CODE(masm->isolate(), ToObject),
                RelocInfo::CODE_TARGET);
        __ Pop(cp);
        __ mov(a3, v0);
        __ Pop(a0, a1);
        __ sra(a0, a0, kSmiTagSize);  // Un-tag.
      }
      __ lw(a2, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset));
      __ bind(&convert_receiver);
    }
    __ Lsa(kScratchReg, sp, a0, kPointerSizeLog2);
    __ sw(a3, MemOperand(kScratchReg));
  }
  __ bind(&done_convert);

  // Invoke the function with the expected parameter count from the shared
  // function info.
  __ lhu(a2,
         FieldMemOperand(a2, SharedFunctionInfo::kFormalParameterCountOffset));
  ParameterCount actual(a0);
  ParameterCount expected(a2);
  __ InvokeFunctionCode(a1, no_reg, expected, actual, JUMP_FUNCTION);

  // The function is a "classConstructor"; need to raise an exception.
  __ bind(&class_constructor);
  {
    FrameScope frame(masm, StackFrame::INTERNAL);
    __ Push(a1);
    __ CallRuntime(Runtime::kThrowConstructorNonCallableError);
  }
}
void Builtins::Generate_CallBoundFunctionImpl(MacroAssembler* masm) {
  // a0 holds the argument count; a1 the JSBoundFunction to call.
  __ AssertBoundFunction(a1);

  // Patch the receiver to [[BoundThis]].
  __ lw(kScratchReg, FieldMemOperand(a1, JSBoundFunction::kBoundThisOffset));
  __ Lsa(t0, sp, a0, kPointerSizeLog2);
  __ sw(kScratchReg, MemOperand(t0));

  // Load [[BoundArguments]] into a2 and its length into t0.
  __ lw(a2, FieldMemOperand(a1, JSBoundFunction::kBoundArgumentsOffset));
  __ lw(t0, FieldMemOperand(a2, FixedArray::kLengthOffset));
  __ SmiUntag(t0);

  // Reserve stack space for the [[BoundArguments]].
  {
    Label done;
    __ sll(t1, t0, kPointerSizeLog2);
    __ Subu(sp, sp, Operand(t1));
    // Check the stack for overflow. We are not trying to catch interruptions
    // (i.e. debug break and preemption) here, so check the "real stack
    // limit".
    __ LoadRoot(kScratchReg, RootIndex::kRealStackLimit);
    __ Branch(&done, hs, sp, Operand(kScratchReg));
    // Restore the stack pointer.
    __ Addu(sp, sp, Operand(t1));
    {
      FrameScope scope(masm, StackFrame::MANUAL);
      __ EnterFrame(StackFrame::INTERNAL);
      __ CallRuntime(Runtime::kThrowStackOverflow);
    }
    __ bind(&done);
  }

  // Relocate arguments down the stack.
  {
    Label loop, done_loop;
    __ mov(t1, zero_reg);
    __ bind(&loop);
    __ Branch(&done_loop, gt, t1, Operand(a0));
    __ Lsa(t2, sp, t0, kPointerSizeLog2);
    __ lw(kScratchReg, MemOperand(t2));
    __ Lsa(t2, sp, t1, kPointerSizeLog2);
    __ sw(kScratchReg, MemOperand(t2));
    __ Addu(t0, t0, Operand(1));
    __ Addu(t1, t1, Operand(1));
    __ Branch(&loop);
    __ bind(&done_loop);
  }

  // Copy [[BoundArguments]] to the stack (below the arguments).
  {
    Label loop, done_loop;
    __ lw(t0, FieldMemOperand(a2, FixedArray::kLengthOffset));
    __ SmiUntag(t0);
    __ Addu(a2, a2, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
    __ bind(&loop);
    __ Subu(t0, t0, Operand(1));
    __ Branch(&done_loop, lt, t0, Operand(zero_reg));
    __ Lsa(t1, a2, t0, kPointerSizeLog2);
    __ lw(kScratchReg, MemOperand(t1));
    __ Lsa(t1, sp, a0, kPointerSizeLog2);
    __ sw(kScratchReg, MemOperand(t1));
    __ Addu(a0, a0, Operand(1));
    __ Branch(&loop);
    __ bind(&done_loop);
  }

  // Call the [[BoundTargetFunction]] via the Call builtin.
  __ lw(a1, FieldMemOperand(a1, JSBoundFunction::kBoundTargetFunctionOffset));
  __ Jump(BUILTIN_CODE(masm->isolate(), Call_ReceiverIsAny),
          RelocInfo::CODE_TARGET);
}
void Builtins::Generate_Call(MacroAssembler* masm, ConvertReceiverMode mode) {
  // a0 holds the argument count; a1 the target to call (can be any Object).
  Label non_callable, non_function, non_smi;
  __ JumpIfSmi(a1, &non_callable);
  __ bind(&non_smi);
  __ GetObjectType(a1, t1, t2);
  __ Jump(masm->isolate()->builtins()->CallFunction(mode),
          RelocInfo::CODE_TARGET, eq, t2, Operand(JS_FUNCTION_TYPE));
  __ Jump(BUILTIN_CODE(masm->isolate(), CallBoundFunction),
          RelocInfo::CODE_TARGET, eq, t2, Operand(JS_BOUND_FUNCTION_TYPE));

  // Check if target has a [[Call]] internal method.
  __ lbu(t1, FieldMemOperand(t1, Map::kBitFieldOffset));
  __ And(t1, t1, Operand(Map::IsCallableBit::kMask));
  __ Branch(&non_callable, eq, t1, Operand(zero_reg));

  // Check if target is a proxy and call the CallProxy builtin.
  __ Branch(&non_function, ne, t2, Operand(JS_PROXY_TYPE));
  __ Jump(BUILTIN_CODE(masm->isolate(), CallProxy), RelocInfo::CODE_TARGET);

  // 2. Call to something else, which might have a [[Call]] internal method
  // (if not we raise an exception).
  __ bind(&non_function);
  // Overwrite the original receiver with the (original) target.
  __ Lsa(kScratchReg, sp, a0, kPointerSizeLog2);
  __ sw(a1, MemOperand(kScratchReg));
  // Let the "call_as_function_delegate" take care of the rest.
  __ LoadNativeContextSlot(Context::CALL_AS_FUNCTION_DELEGATE_INDEX, a1);
  __ Jump(masm->isolate()->builtins()->CallFunction(
              ConvertReceiverMode::kNotNullOrUndefined),
          RelocInfo::CODE_TARGET);

  // 3. Call to something that is not callable.
  __ bind(&non_callable);
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ Push(a1);
    __ CallRuntime(Runtime::kThrowCalledNonCallable);
  }
}
void Builtins::Generate_ConstructFunction(MacroAssembler* masm) {
  // a0 holds the argument count, a1 the constructor (checked to be a
  // JSFunction) and a3 the new target (checked to be a constructor).
  __ AssertConstructor(a1);
  __ AssertFunction(a1);

  // Calling convention for function specific ConstructStubs requires a2 to
  // contain either an AllocationSite or undefined.
  __ LoadRoot(a2, RootIndex::kUndefinedValue);

  Label call_generic_stub;

  // Jump to JSBuiltinsConstructStub or JSConstructStubGeneric.
  __ lw(t0, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset));
  __ lw(t0, FieldMemOperand(t0, SharedFunctionInfo::kFlagsOffset));
  __ And(t0, t0, Operand(SharedFunctionInfo::ConstructAsBuiltinBit::kMask));
  __ Branch(&call_generic_stub, eq, t0, Operand(zero_reg));

  __ Jump(BUILTIN_CODE(masm->isolate(), JSBuiltinsConstructStub),
          RelocInfo::CODE_TARGET);

  __ bind(&call_generic_stub);
  __ Jump(BUILTIN_CODE(masm->isolate(), JSConstructStubGeneric),
          RelocInfo::CODE_TARGET);
}
void Builtins::Generate_ConstructBoundFunction(MacroAssembler* masm) {
  // a0 holds the argument count, a1 the JSBoundFunction and a3 the new
  // target (checked to be a constructor).
  __ AssertConstructor(a1);
  __ AssertBoundFunction(a1);

  // Load [[BoundArguments]] into a2 and its length into t0.
  __ lw(a2, FieldMemOperand(a1, JSBoundFunction::kBoundArgumentsOffset));
  __ lw(t0, FieldMemOperand(a2, FixedArray::kLengthOffset));
  __ SmiUntag(t0);

  // Reserve stack space for the [[BoundArguments]].
  {
    Label done;
    __ sll(t1, t0, kPointerSizeLog2);
    __ Subu(sp, sp, Operand(t1));
    // Check the stack for overflow. We are not trying to catch interruptions
    // (i.e. debug break and preemption) here, so check the "real stack
    // limit".
    __ LoadRoot(kScratchReg, RootIndex::kRealStackLimit);
    __ Branch(&done, hs, sp, Operand(kScratchReg));
    // Restore the stack pointer.
    __ Addu(sp, sp, Operand(t1));
    {
      FrameScope scope(masm, StackFrame::MANUAL);
      __ EnterFrame(StackFrame::INTERNAL);
      __ CallRuntime(Runtime::kThrowStackOverflow);
    }
    __ bind(&done);
  }

  // Relocate arguments down the stack.
  {
    Label loop, done_loop;
    __ mov(t1, zero_reg);
    __ bind(&loop);
    __ Branch(&done_loop, ge, t1, Operand(a0));
    __ Lsa(t2, sp, t0, kPointerSizeLog2);
    __ lw(kScratchReg, MemOperand(t2));
    __ Lsa(t2, sp, t1, kPointerSizeLog2);
    __ sw(kScratchReg, MemOperand(t2));
    __ Addu(t0, t0, Operand(1));
    __ Addu(t1, t1, Operand(1));
    __ Branch(&loop);
    __ bind(&done_loop);
  }

  // Copy [[BoundArguments]] to the stack (below the arguments).
  {
    Label loop, done_loop;
    __ lw(t0, FieldMemOperand(a2, FixedArray::kLengthOffset));
    __ SmiUntag(t0);
    __ Addu(a2, a2, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
    __ bind(&loop);
    __ Subu(t0, t0, Operand(1));
    __ Branch(&done_loop, lt, t0, Operand(zero_reg));
    __ Lsa(t1, a2, t0, kPointerSizeLog2);
    __ lw(kScratchReg, MemOperand(t1));
    __ Lsa(t1, sp, a0, kPointerSizeLog2);
    __ sw(kScratchReg, MemOperand(t1));
    __ Addu(a0, a0, Operand(1));
    __ Branch(&loop);
    __ bind(&done_loop);
  }

  // Patch new.target to [[BoundTargetFunction]] if new.target equals target.
  {
    Label skip_load;
    __ Branch(&skip_load, ne, a1, Operand(a3));
    __ lw(a3, FieldMemOperand(a1, JSBoundFunction::kBoundTargetFunctionOffset));
    __ bind(&skip_load);
  }

  // Construct the [[BoundTargetFunction]] via the Construct builtin.
  __ lw(a1, FieldMemOperand(a1, JSBoundFunction::kBoundTargetFunctionOffset));
  __ Jump(BUILTIN_CODE(masm->isolate(), Construct), RelocInfo::CODE_TARGET);
}
void Builtins::Generate_Construct(MacroAssembler* masm) {
  // a0 holds the argument count, a1 the constructor (can be any Object) and
  // a3 the new target.

  // Check if target is a Smi.
  Label non_constructor, non_proxy;
  __ JumpIfSmi(a1, &non_constructor);

  // Check if target has a [[Construct]] internal method.
  __ lw(t1, FieldMemOperand(a1, HeapObject::kMapOffset));
  __ lbu(t3, FieldMemOperand(t1, Map::kBitFieldOffset));
  __ And(t3, t3, Operand(Map::IsConstructorBit::kMask));
  __ Branch(&non_constructor, eq, t3, Operand(zero_reg));

  // Dispatch based on instance type.
  __ lhu(t2, FieldMemOperand(t1, Map::kInstanceTypeOffset));
  __ Jump(BUILTIN_CODE(masm->isolate(), ConstructFunction),
          RelocInfo::CODE_TARGET, eq, t2, Operand(JS_FUNCTION_TYPE));

  // Only dispatch to bound functions after checking whether they are
  // constructors.
  __ Jump(BUILTIN_CODE(masm->isolate(), ConstructBoundFunction),
          RelocInfo::CODE_TARGET, eq, t2, Operand(JS_BOUND_FUNCTION_TYPE));

  // Only dispatch to proxies after checking whether they are constructors.
  __ Branch(&non_proxy, ne, t2, Operand(JS_PROXY_TYPE));
  __ Jump(BUILTIN_CODE(masm->isolate(), ConstructProxy),
          RelocInfo::CODE_TARGET);

  // Called Construct on an exotic Object with a [[Construct]] internal
  // method.
  __ bind(&non_proxy);
  {
    // Overwrite the original receiver with the (original) target.
    __ Lsa(kScratchReg, sp, a0, kPointerSizeLog2);
    __ sw(a1, MemOperand(kScratchReg));
    // Let the "call_as_constructor_delegate" take care of the rest.
    __ LoadNativeContextSlot(Context::CALL_AS_CONSTRUCTOR_DELEGATE_INDEX, a1);
    __ Jump(masm->isolate()->builtins()->CallFunction(),
            RelocInfo::CODE_TARGET);
  }

  // Called Construct on an Object that doesn't have a [[Construct]] internal
  // method.
  __ bind(&non_constructor);
  __ Jump(BUILTIN_CODE(masm->isolate(), ConstructedNonConstructable),
          RelocInfo::CODE_TARGET);
}
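// The ArgumentsAdaptorTrampoline bridges calls where the actual argument
// count (a0) differs from the callee's formal parameter count (a2), either
// dropping extra arguments or padding missing ones with undefined. a1 holds
// the function and a3 the new target, both passed through to the callee.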
void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
  Label invoke, dont_adapt_arguments, stack_overflow;

  Label enough, too_few;
  __ Branch(&dont_adapt_arguments, eq, a2,
            Operand(SharedFunctionInfo::kDontAdaptArgumentsSentinel));
  // We use Uless as the number of arguments should always be greater than 0.
  __ Branch(&too_few, Uless, a0, Operand(a2));

  {  // Enough parameters: actual >= expected.
    __ bind(&enough);
    EnterArgumentsAdaptorFrame(masm);
    Generate_StackOverflowCheck(masm, a2, t1, kScratchReg, &stack_overflow);

    // Calculate the copy start address into a0 and the copy end address
    // into t1.
    __ Lsa(a0, fp, a0, kPointerSizeLog2 - kSmiTagSize);
    // Adjust for return address and receiver.
    __ Addu(a0, a0, Operand(2 * kPointerSize));
    // Compute copy end address.
    __ sll(t1, a2, kPointerSizeLog2);
    __ subu(t1, a0, t1);

    // Copy the arguments (including the receiver) to the new stack frame.
    Label copy;
    __ bind(&copy);
    __ lw(t0, MemOperand(a0));
    __ push(t0);
    __ Branch(USE_DELAY_SLOT, &copy, ne, a0, Operand(t1));
    __ addiu(a0, a0, -kPointerSize);  // In delay slot.

    __ jmp(&invoke);
  }

  {  // Too few parameters: actual < expected.
    __ bind(&too_few);
    EnterArgumentsAdaptorFrame(masm);
    Generate_StackOverflowCheck(masm, a2, t1, kScratchReg, &stack_overflow);

    // Calculate the copy start address into a0 and the copy end address
    // into t3.
    __ Lsa(a0, fp, a0, kPointerSizeLog2 - kSmiTagSize);
    // Adjust for return address and receiver.
    __ Addu(a0, a0, Operand(2 * kPointerSize));
    // Compute the copy end address; also adjust for the return address.
    __ Addu(t3, fp, kPointerSize);

    // Copy the arguments (including the receiver) to the new stack frame.
    Label copy;
    __ bind(&copy);
    __ lw(t0, MemOperand(a0));
    __ Subu(sp, sp, kPointerSize);
    __ Subu(a0, a0, kPointerSize);
    __ Branch(USE_DELAY_SLOT, &copy, ne, a0, Operand(t3));
    __ sw(t0, MemOperand(sp));  // In the delay slot.

    // Fill the remaining expected arguments with undefined.
    __ LoadRoot(t0, RootIndex::kUndefinedValue);
    __ sll(t2, a2, kPointerSizeLog2);
    __ Subu(t1, fp, Operand(t2));
    // Adjust for frame.
    __ Subu(t1, t1,
            Operand(ArgumentsAdaptorFrameConstants::kFixedFrameSizeFromFp +
                    kPointerSize));

    Label fill;
    __ bind(&fill);
    __ Subu(sp, sp, kPointerSize);
    __ Branch(USE_DELAY_SLOT, &fill, ne, sp, Operand(t1));
    __ sw(t0, MemOperand(sp));
  }

  // Call the entry point.
  __ bind(&invoke);
  __ mov(a0, a2);
  // a0: expected number of arguments
  // a1: function (passed through to callee)
  // a3: new target (passed through to callee)
  static_assert(kJavaScriptCallCodeStartRegister == a2, "ABI mismatch");
  __ lw(a2, FieldMemOperand(a1, JSFunction::kCodeOffset));
  __ Addu(a2, a2, Code::kHeaderSize - kHeapObjectTag);
  __ Call(a2);

  // Store offset of return address for deoptimizer.
  masm->isolate()->heap()->SetArgumentsAdaptorDeoptPCOffset(masm->pc_offset());

  // Exit frame and return.
  LeaveArgumentsAdaptorFrame(masm);
  __ Ret();

  // Don't adapt arguments.
  __ bind(&dont_adapt_arguments);
  static_assert(kJavaScriptCallCodeStartRegister == a2, "ABI mismatch");
  __ lw(a2, FieldMemOperand(a1, JSFunction::kCodeOffset));
  __ Addu(a2, a2, Code::kHeaderSize - kHeapObjectTag);
  __ Jump(a2);

  __ bind(&stack_overflow);
  {
    FrameScope frame(masm, StackFrame::MANUAL);
    __ CallRuntime(Runtime::kThrowStackOverflow);
  }
}
void Builtins::Generate_WasmCompileLazy(MacroAssembler* masm) {
  // The function index was put in t0 by the jump table trampoline. Convert
  // it to a Smi for the runtime call.
  __ SmiTag(kWasmCompileLazyFuncIndexRegister);
  {
    HardAbortScope hard_abort(masm);  // Avoid calls to Abort.
    FrameScope scope(masm, StackFrame::WASM_COMPILE_LAZY);

    // Save all parameter registers. They might be overwritten in the runtime
    // call below. We don't have any callee-saved registers in wasm, so no
    // need to store anything else.
    constexpr RegList gp_regs = Register::ListOf<a0, a1, a2, a3>();
    constexpr RegList fp_regs =
        DoubleRegister::ListOf<f2, f4, f6, f8, f10, f12, f14>();
    __ MultiPush(gp_regs);
    __ MultiPushFPU(fp_regs);

    // Pass instance and function index as explicit arguments to the runtime
    // function.
    __ Push(kWasmInstanceRegister, kWasmCompileLazyFuncIndexRegister);
    // Load the correct CEntry builtin from the instance object.
    __ lw(a2, FieldMemOperand(kWasmInstanceRegister,
                              WasmInstanceObject::kCEntryStubOffset));
    // Initialize the JavaScript context with 0. CEntry will use it to set
    // the current context on the isolate.
    __ Move(kContextRegister, Smi::zero());
    __ CallRuntimeWithCEntry(Runtime::kWasmCompileLazy, a2);

    // Restore registers.
    __ MultiPopFPU(fp_regs);
    __ MultiPop(gp_regs);
  }
  // Finally, jump to the entrypoint.
  __ Jump(kScratchReg, v0, 0);
}
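// The CEntry stub: transitions from JavaScript into C++. a0 holds the number
// of arguments including the receiver and a1 the C function to call; an exit
// frame is set up, the C function is called, and any returned exception
// sentinel is routed to the pending exception handler.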
void Builtins::Generate_CEntry(MacroAssembler* masm, int result_size,
                               SaveFPRegsMode save_doubles, ArgvMode argv_mode,
                               bool builtin_exit_frame) {
  // If argv_mode == kArgvInRegister, a2 already points to the first argument.
  if (argv_mode == kArgvInRegister) {
    // Move argv into the correct callee-saved register.
    __ mov(s1, a2);
  } else {
    // Compute the argv pointer in a callee-saved register.
    __ Lsa(s1, sp, a0, kPointerSizeLog2);
    __ Subu(s1, s1, kPointerSize);
  }

  // Enter the exit frame that transitions from JavaScript to C++.
  FrameScope scope(masm, StackFrame::MANUAL);
  __ EnterExitFrame(
      save_doubles == kSaveFPRegs, 0,
      builtin_exit_frame ? StackFrame::BUILTIN_EXIT : StackFrame::EXIT);

  // s0: number of arguments including receiver (C callee-saved)
  // s1: pointer to first argument (C callee-saved)
  // s2: pointer to builtin function (C callee-saved)
  __ mov(s0, a0);
  __ mov(s2, a1);

  // We are calling compiled C/C++ code; we also need to reserve the four
  // argument slots on the stack.

  __ AssertStackIsAligned();

  // a0 = argc, a1 = argv, a2 = isolate
  __ li(a2, ExternalReference::isolate_address(masm->isolate()));
  __ mov(a1, s1);

  // To let the GC traverse the return address of the exit frames, we need to
  // know where the return address is. The CEntry is unmovable, so we can
  // store the address on the stack to be able to find it again, and we never
  // have to restore it, because it will not change.
  {
    Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm);
    int kNumInstructionsToJump = 4;
    Label find_ra;
    // Adjust the value in ra to point to the correct return location, past
    // the real call into C code, and push it.
    if (kArchVariant >= kMips32r6) {
      __ addiupc(ra, kNumInstructionsToJump + 1);
    } else {
      // This no-op-and-link sequence saves PC + 8 in the ra register on
      // pre-r6 MIPS.
      __ nal();  // nal has a branch delay slot.
      __ Addu(ra, ra, kNumInstructionsToJump * kInstrSize);
    }
    __ bind(&find_ra);

    // This spot was reserved in EnterExitFrame.
    __ sw(ra, MemOperand(sp));
    // Call the C routine through t9 to conform to the ABI for PIC code.
    __ mov(t9, s2);
    __ jalr(t9);
    // Set up sp in the delay slot.
    __ addiu(sp, sp, -kCArgsSlotsSize);
    // Make sure the stored 'ra' points to this position.
    DCHECK_EQ(kNumInstructionsToJump,
              masm->InstructionsGeneratedSince(&find_ra));
  }

  // Result is returned in v0 (or v1:v0); check it for the exception sentinel.
  Label exception_returned;
  __ LoadRoot(t0, RootIndex::kException);
  __ Branch(&exception_returned, eq, t0, Operand(v0));

  // Check that there is no pending exception, otherwise we should have
  // returned the exception sentinel.
  if (FLAG_debug_code) {
    Label okay;
    ExternalReference pending_exception_address = ExternalReference::Create(
        IsolateAddressId::kPendingExceptionAddress, masm->isolate());
    __ li(a2, pending_exception_address);
    __ lw(a2, MemOperand(a2));
    __ LoadRoot(t0, RootIndex::kTheHoleValue);
    // Cannot use Check here, as it attempts to generate a call into the
    // runtime.
    __ Branch(&okay, eq, t0, Operand(a2));
    __ stop("Unexpected pending exception");
    __ bind(&okay);
  }

  // Exit the C frame and return the result in v0 (or v1:v0).
  Register argc = argv_mode == kArgvInRegister
                      // We don't want to pop arguments, so set argc to no_reg.
                      ? no_reg
                      // s0 still holds argc (C callee-saved).
                      : s0;
  __ LeaveExitFrame(save_doubles == kSaveFPRegs, argc, EMIT_RETURN);

  // Handling of exception.
  __ bind(&exception_returned);

  ExternalReference pending_handler_context_address = ExternalReference::Create(
      IsolateAddressId::kPendingHandlerContextAddress, masm->isolate());
  ExternalReference pending_handler_entrypoint_address =
      ExternalReference::Create(
          IsolateAddressId::kPendingHandlerEntrypointAddress, masm->isolate());
  ExternalReference pending_handler_fp_address = ExternalReference::Create(
      IsolateAddressId::kPendingHandlerFPAddress, masm->isolate());
  ExternalReference pending_handler_sp_address = ExternalReference::Create(
      IsolateAddressId::kPendingHandlerSPAddress, masm->isolate());

  // Ask the runtime for help to determine the handler. This will set v0 to
  // contain the current pending exception; don't clobber it.
  ExternalReference find_handler =
      ExternalReference::Create(Runtime::kUnwindAndFindExceptionHandler);
  {
    FrameScope scope(masm, StackFrame::MANUAL);
    __ PrepareCallCFunction(3, 0, a0);
    __ mov(a0, zero_reg);
    __ mov(a1, zero_reg);
    __ li(a2, ExternalReference::isolate_address(masm->isolate()));
    __ CallCFunction(find_handler, 3);
  }

  // Retrieve the handler context, SP and FP.
  __ li(cp, pending_handler_context_address);
  __ lw(cp, MemOperand(cp));
  __ li(sp, pending_handler_sp_address);
  __ lw(sp, MemOperand(sp));
  __ li(fp, pending_handler_fp_address);
  __ lw(fp, MemOperand(fp));

  // If the handler is a JS frame, restore the context to the frame. Note
  // that the context will be set to (cp == 0) for non-JS frames.
  Label zero;
  __ Branch(&zero, eq, cp, Operand(zero_reg));
  __ sw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
  __ bind(&zero);

  // Reset the masking register. It is safe to always do this, because the
  // underlying register is caller-saved and can be arbitrarily clobbered.
  __ ResetSpeculationPoisonRegister();

  // Compute the handler entry address and jump to it.
  __ li(t9, pending_handler_entrypoint_address);
  __ lw(t9, MemOperand(t9));
  __ Jump(t9);
}
void Builtins::Generate_DoubleToI(MacroAssembler* masm) {
  Label out_of_range, only_low, negate, done;
  Register result_reg = t0;

  Register scratch = GetRegisterThatIsNotOneOf(result_reg);
  Register scratch2 = GetRegisterThatIsNotOneOf(result_reg, scratch);
  Register scratch3 = GetRegisterThatIsNotOneOf(result_reg, scratch, scratch2);
  DoubleRegister double_scratch = kScratchDoubleReg;

  // Account for saved regs.
  const int kArgumentOffset = 4 * kPointerSize;

  __ Push(result_reg);
  __ Push(scratch, scratch2, scratch3);

  // Load double input.
  __ Ldc1(double_scratch, MemOperand(sp, kArgumentOffset));

  // Clear cumulative exception flags and save the FCSR.
  __ cfc1(scratch2, FCSR);
  __ ctc1(zero_reg, FCSR);

  // Try a conversion to a signed integer.
  __ Trunc_w_d(double_scratch, double_scratch);
  // Move the converted value into the result register.
  __ mfc1(scratch3, double_scratch);

  // Retrieve and restore the FCSR.
  __ cfc1(scratch, FCSR);
  __ ctc1(scratch2, FCSR);

  // Check for overflow and NaNs.
  __ And(scratch, scratch,
         kFCSROverflowFlagMask | kFCSRUnderflowFlagMask |
             kFCSRInvalidOpFlagMask);
  // If we had no exceptions we are done.
  Label error;
  __ Branch(&error, ne, scratch, Operand(zero_reg));
  __ Move(result_reg, scratch3);
  __ Branch(&done);
  __ bind(&error);

  // Load the double value and perform a manual truncation.
  Register input_high = scratch2;
  Register input_low = scratch3;

  __ lw(input_low, MemOperand(sp, kArgumentOffset + Register::kMantissaOffset));
  __ lw(input_high,
        MemOperand(sp, kArgumentOffset + Register::kExponentOffset));

  Label normal_exponent, restore_sign;
  // Extract the biased exponent in result.
  __ Ext(result_reg, input_high, HeapNumber::kExponentShift,
         HeapNumber::kExponentBits);

  // Check for Infinity and NaNs, which should return 0.
  __ Subu(scratch, result_reg, HeapNumber::kExponentMask);
  __ Movz(result_reg, zero_reg, scratch);
  __ Branch(&done, eq, scratch, Operand(zero_reg));

  // Express exponent as delta to (number of mantissa bits + 31).
  __ Subu(result_reg, result_reg,
          Operand(HeapNumber::kExponentBias + HeapNumber::kMantissaBits + 31));

  // If the delta is strictly positive, all bits would be shifted away,
  // which means that we can return 0.
  __ Branch(&normal_exponent, le, result_reg, Operand(zero_reg));
  __ mov(result_reg, zero_reg);
  __ Branch(&done);

  __ bind(&normal_exponent);
  const int kShiftBase = HeapNumber::kNonMantissaBitsInTopWord - 1;
  // Calculate shift.
  __ Addu(scratch, result_reg, Operand(kShiftBase + HeapNumber::kMantissaBits));

  // Save the sign.
  Register sign = result_reg;
  result_reg = no_reg;
  __ And(sign, input_high, Operand(HeapNumber::kSignMask));

  // Shifts > 31 bits are not valid on MIPS, so we need to check for this
  // specific case.
  Label high_shift_needed, high_shift_done;
  __ Branch(&high_shift_needed, lt, scratch, Operand(32));
  __ mov(input_high, zero_reg);
  __ Branch(&high_shift_done);
  __ bind(&high_shift_needed);

  // Set the implicit 1 before the mantissa part in input_high.
  __ Or(input_high, input_high,
        Operand(1 << HeapNumber::kMantissaBitsInTopWord));
  // Shift the mantissa bits to the correct position. We don't need to clear
  // non-mantissa bits as they will be shifted away.
  __ sllv(input_high, input_high, scratch);

  __ bind(&high_shift_done);

  // Replace the shifted bits with bits from the lower mantissa word.
  Label pos_shift, shift_done;
  __ li(kScratchReg, 32);
  __ subu(scratch, kScratchReg, scratch);
  __ Branch(&pos_shift, ge, scratch, Operand(zero_reg));

  // Negate scratch.
  __ Subu(scratch, zero_reg, scratch);
  __ sllv(input_low, input_low, scratch);
  __ Branch(&shift_done);

  __ bind(&pos_shift);
  __ srlv(input_low, input_low, scratch);

  __ bind(&shift_done);
  __ Or(input_high, input_high, Operand(input_low));
  // Restore sign if necessary.
  __ mov(scratch, sign);
  result_reg = sign;
  sign = no_reg;
  __ Subu(result_reg, zero_reg, input_high);
  __ Movz(result_reg, input_high, scratch);

  __ bind(&done);
  __ sw(result_reg, MemOperand(sp, kArgumentOffset));
  __ Pop(scratch, scratch2, scratch3);
  __ Pop(result_reg);
  __ Ret();
}
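// Computes base^exponent (double_base and double_exponent, result in
// double_result). Integer exponents are handled with repeated squaring;
// everything else goes to the C library power function.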
void Builtins::Generate_MathPowInternal(MacroAssembler* masm) {
  const Register exponent = a2;
  const DoubleRegister double_base = f2;
  const DoubleRegister double_exponent = f4;
  const DoubleRegister double_result = f0;
  const DoubleRegister double_scratch = f6;
  const FPURegister single_scratch = f8;
  const Register scratch = t5;
  const Register scratch2 = t3;

  Label call_runtime, done, int_exponent;

  Label int_exponent_convert;
  // Detect integer exponents stored as doubles.
  __ EmitFPUTruncate(kRoundToMinusInf, scratch, double_exponent, kScratchReg,
                     double_scratch, scratch2, kCheckForInexactConversion);
  // scratch2 == 0 means there was no conversion error.
  __ Branch(&int_exponent_convert, eq, scratch2, Operand(zero_reg));

  __ push(ra);
  {
    AllowExternalCallThatCantCauseGC scope(masm);
    __ PrepareCallCFunction(0, 2, scratch2);
    __ MovToFloatParameters(double_base, double_exponent);
    __ CallCFunction(ExternalReference::power_double_double_function(), 0, 2);
  }
  __ pop(ra);
  __ MovFromFloatResult(double_result);
  __ jmp(&done);

  __ bind(&int_exponent_convert);

  // Calculate power with integer exponent.
  __ bind(&int_exponent);

  // Get two copies of the exponent in the registers scratch and exponent.
  // The exponent has previously been stored into scratch as an untagged
  // integer.
  __ mov(exponent, scratch);

  __ mov_d(double_scratch, double_base);  // Back up base.
  __ Move(double_result, 1.0);

  // Get the absolute value of the exponent.
  Label positive_exponent, bail_out;
  __ Branch(&positive_exponent, ge, scratch, Operand(zero_reg));
  __ Subu(scratch, zero_reg, scratch);
  // Check when Subu overflows and we get a negative result (happens only
  // when the input is MIN_INT).
  __ Branch(&bail_out, gt, zero_reg, Operand(scratch));
  __ bind(&positive_exponent);
  __ Assert(ge, AbortReason::kUnexpectedNegativeValue, scratch,
            Operand(zero_reg));

  // Exponentiation by squaring.
  Label while_true, no_carry, loop_end;
  __ bind(&while_true);

  __ And(scratch2, scratch, 1);

  __ Branch(&no_carry, eq, scratch2, Operand(zero_reg));
  __ mul_d(double_result, double_result, double_scratch);
  __ bind(&no_carry);

  __ sra(scratch, scratch, 1);

  __ Branch(&loop_end, eq, scratch, Operand(zero_reg));
  __ mul_d(double_scratch, double_scratch, double_scratch);

  __ Branch(&while_true);

  __ bind(&loop_end);

  __ Branch(&done, ge, exponent, Operand(zero_reg));
  __ Move(double_scratch, 1.0);
  __ div_d(double_result, double_scratch, double_result);
  // Test whether the result is zero. Bail out to check for a subnormal
  // result: due to subnormals, x^-y == (1/x)^y does not hold in all cases.
  __ CompareF64(EQ, double_result, kDoubleRegZero);
  __ BranchFalseShortF(&done);

  // double_exponent may not contain the exponent value if the input was a
  // smi. We set it with the exponent value before bailing out.
  __ bind(&bail_out);
  __ mtc1(exponent, single_scratch);
  __ cvt_d_w(double_exponent, single_scratch);

  __ push(ra);
  {
    AllowExternalCallThatCantCauseGC scope(masm);
    __ PrepareCallCFunction(0, 2, scratch);
    __ MovToFloatParameters(double_base, double_exponent);
    __ CallCFunction(ExternalReference::power_double_double_function(), 0, 2);
  }
  __ pop(ra);
  __ MovFromFloatResult(double_result);

  __ bind(&done);
  __ Ret();
}
namespace {

void GenerateInternalArrayConstructorCase(MacroAssembler* masm,
                                          ElementsKind kind) {
  // Load undefined into the allocation site parameter as required by
  // ArrayNArgumentsConstructor.
  __ LoadRoot(kJavaScriptCallExtraArg1Register, RootIndex::kUndefinedValue);

  __ Jump(CodeFactory::InternalArrayNoArgumentConstructor(masm->isolate(), kind)
              .code(),
          RelocInfo::CODE_TARGET, lo, a0, Operand(1));

  __ Jump(BUILTIN_CODE(masm->isolate(), ArrayNArgumentsConstructor),
          RelocInfo::CODE_TARGET, hi, a0, Operand(1));

  if (IsFastPackedElementsKind(kind)) {
    // We might need to create a holey array; look at the first argument.
    __ lw(kScratchReg, MemOperand(sp, 0));

    __ Jump(CodeFactory::InternalArraySingleArgumentConstructor(
                masm->isolate(), GetHoleyElementsKind(kind))
                .code(),
            RelocInfo::CODE_TARGET, ne, kScratchReg, Operand(zero_reg));
  }

  __ Jump(
      CodeFactory::InternalArraySingleArgumentConstructor(masm->isolate(), kind)
          .code(),
      RelocInfo::CODE_TARGET);
}

}  // namespace
void Builtins::Generate_InternalArrayConstructorImpl(MacroAssembler* masm) {
  // a0 holds argc and a1 the InternalArray constructor; the arguments are on
  // the stack.

  if (FLAG_debug_code) {
    // The array construct code is only set for the global and natives
    // builtin Array functions, which always have maps.

    // Initial map for the builtin Array function should be a map.
    __ lw(a3, FieldMemOperand(a1, JSFunction::kPrototypeOrInitialMapOffset));
    // Will both indicate a nullptr and a Smi.
    __ SmiTst(a3, kScratchReg);
    __ Assert(ne, AbortReason::kUnexpectedInitialMapForArrayFunction,
              kScratchReg, Operand(zero_reg));
    __ GetObjectType(a3, a3, t0);
    __ Assert(eq, AbortReason::kUnexpectedInitialMapForArrayFunction, t0,
              Operand(MAP_TYPE));
  }

  // Figure out the right elements kind.
  __ lw(a3, FieldMemOperand(a1, JSFunction::kPrototypeOrInitialMapOffset));

  // Load the map's "bit field 2" into a3. We only need the first byte,
  // but the following bit field extraction takes care of that anyway.
  __ lbu(a3, FieldMemOperand(a3, Map::kBitField2Offset));
  // Retrieve elements_kind from bit field 2.
  __ DecodeField<Map::ElementsKindBits>(a3);

  if (FLAG_debug_code) {
    Label done;
    __ Branch(&done, eq, a3, Operand(PACKED_ELEMENTS));
    __ Assert(
        eq,
        AbortReason::kInvalidElementsKindForInternalArrayOrInternalPackedArray,
        a3, Operand(HOLEY_ELEMENTS));
    __ bind(&done);
  }

  Label fast_elements_case;
  __ Branch(&fast_elements_case, eq, a3, Operand(PACKED_ELEMENTS));
  GenerateInternalArrayConstructorCase(masm, HOLEY_ELEMENTS);

  __ bind(&fast_elements_case);
  GenerateInternalArrayConstructorCase(masm, PACKED_ELEMENTS);
}

#undef __

}  // namespace internal
}  // namespace v8

#endif  // V8_TARGET_ARCH_MIPS