#if V8_TARGET_ARCH_MIPS64

#include "src/code-factory.h"
#include "src/code-stubs.h"
#include "src/counters.h"
#include "src/debug/debug.h"
#include "src/deoptimizer.h"
#include "src/frame-constants.h"
#include "src/frames.h"
#include "src/mips64/constants-mips64.h"
#include "src/objects-inl.h"
#include "src/objects/js-generator.h"
#include "src/objects/smi.h"
#include "src/register-configuration.h"
#include "src/runtime/runtime.h"
#include "src/wasm/wasm-objects.h"

namespace v8 {
namespace internal {

#define __ ACCESS_MASM(masm)

void Builtins::Generate_Adaptor(MacroAssembler* masm, Address address,
                                ExitFrameType exit_frame_type) {
  __ li(kJavaScriptCallExtraArg1Register, ExternalReference::Create(address));
  if (exit_frame_type == BUILTIN_EXIT) {
    __ Jump(BUILTIN_CODE(masm->isolate(), AdaptorWithBuiltinExitFrame),
            RelocInfo::CODE_TARGET);
  } else {
    DCHECK(exit_frame_type == EXIT);
    __ Jump(BUILTIN_CODE(masm->isolate(), AdaptorWithExitFrame),
            RelocInfo::CODE_TARGET);
  }
}
void Builtins::Generate_InternalArrayConstructor(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- a0     : number of arguments
  //  -- ra     : return address
  //  -- sp[...]: constructor arguments
  // -----------------------------------
  Label generic_array_code, one_or_more_arguments, two_or_more_arguments;

  if (FLAG_debug_code) {
    // The initial map for the builtin InternalArray functions should be maps.
    __ Ld(a2, FieldMemOperand(a1, JSFunction::kPrototypeOrInitialMapOffset));
    __ SmiTst(a2, a4);
    __ Assert(ne, AbortReason::kUnexpectedInitialMapForInternalArrayFunction,
              a4, Operand(zero_reg));
    __ GetObjectType(a2, a3, a4);
    __ Assert(eq, AbortReason::kUnexpectedInitialMapForInternalArrayFunction,
              a4, Operand(MAP_TYPE));
  }

  // Run the native code for the InternalArray function called as a normal
  // function.
  __ Jump(BUILTIN_CODE(masm->isolate(), InternalArrayConstructorImpl),
          RelocInfo::CODE_TARGET);
}
static void GenerateTailCallToReturnedCode(MacroAssembler* masm,
                                           Runtime::FunctionId function_id) {
  // ----------- S t a t e -------------
  //  -- a0 : argument count (preserved for callee)
  //  -- a1 : target function (preserved for callee)
  //  -- a3 : new target (preserved for callee)
  // -----------------------------------
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    // Push a copy of the target function and the new target, plus the
    // function again as an argument to the runtime call.
    __ SmiTag(a0);
    __ Push(a0, a1, a3, a1);

    __ CallRuntime(function_id, 1);
    // Restore target function and new target.
    __ Pop(a0, a1, a3);
    __ SmiUntag(a0);
  }

  static_assert(kJavaScriptCallCodeStartRegister == a2, "ABI mismatch");
  __ Daddu(a2, v0, Operand(Code::kHeaderSize - kHeapObjectTag));
  __ Jump(a2);
}
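// Note on the entry computation above: a Code object is a tagged heap object,
// so its first instruction lives at code + Code::kHeaderSize - kHeapObjectTag.
// A rough C-style sketch of the tail call (illustrative only):
//   Address entry = tagged_code_ptr + Code::kHeaderSize - kHeapObjectTag;
//   goto *entry;  // jump, not call: a0/a1/a3 flow through unchanged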
namespace {

void Generate_JSBuiltinsConstructStubHelper(MacroAssembler* masm) {
  // a0: number of arguments, a1: constructor function, a3: new target,
  // cp: context, sp[...]: constructor arguments.
  {
    // Enter a construct frame and preserve the incoming parameters.
    FrameScope scope(masm, StackFrame::CONSTRUCT);
    __ SmiTag(a0);
    __ Push(cp, a0);
    __ SmiUntag(a0);

    // The receiver for the builtin/api call.
    __ PushRoot(RootIndex::kTheHoleValue);

    // Set up pointer to last argument.
    __ Daddu(t2, fp, Operand(StandardFrameConstants::kCallerSPOffset));

    // Copy arguments and receiver to the expression stack.
    Label loop, entry;
    __ mov(t3, a0);
    __ jmp(&entry);
    __ bind(&loop);
    __ Dlsa(t0, t2, t3, kPointerSizeLog2);
    __ Ld(t1, MemOperand(t0));
    __ push(t1);
    __ bind(&entry);
    __ Daddu(t3, t3, Operand(-1));
    __ Branch(&loop, greater_equal, t3, Operand(zero_reg));

    // Call the function.
    ParameterCount actual(a0);
    __ InvokeFunction(a1, a3, actual, CALL_FUNCTION);

    // Restore the context and the smi-tagged arguments count from the frame,
    // then leave the construct frame.
    __ Ld(cp, MemOperand(fp, ConstructFrameConstants::kContextOffset));
    __ Ld(a1, MemOperand(fp, ConstructFrameConstants::kLengthOffset));
  }

  // Remove caller arguments (plus the receiver) from the stack and return.
  __ SmiScale(a4, a1, kPointerSizeLog2);
  __ Daddu(sp, sp, a4);
  __ Daddu(sp, sp, kPointerSize);
  __ Ret();
}
static void Generate_StackOverflowCheck(MacroAssembler* masm, Register num_args,
                                        Register scratch1, Register scratch2,
                                        Label* stack_overflow) {
  // Check the stack for overflow. We are not trying to catch interruptions
  // (i.e. debug break and preemption) here, so check the "real stack limit".
  __ LoadRoot(scratch1, RootIndex::kRealStackLimit);
  // Make scratch1 the space we have left. The stack might already be
  // overflowed here, which will cause scratch1 to become negative.
  __ dsubu(scratch1, sp, scratch1);
  // Check if the arguments will overflow the stack.
  __ dsll(scratch2, num_args, kPointerSizeLog2);
  // Signed comparison.
  __ Branch(stack_overflow, le, scratch1, Operand(scratch2));
}

}  // namespace
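// The check above computes, in effect (illustrative C, not part of the
// builtin):
//   intptr_t headroom = sp - real_stack_limit;  // may already be negative
//   if (headroom <= num_args * kPointerSize) goto stack_overflow;
// Using the *real* limit rather than the interrupt limit means pending
// debug-break/preemption requests do not spuriously trip this check.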
// The construct stub for ES5 constructor functions and ES6 class constructors.
void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) {
  // a0: number of arguments (untagged), a1: constructor, a3: new target,
  // cp: context, sp[...]: constructor arguments.

  // Enter a construct frame.
  {
    FrameScope scope(masm, StackFrame::CONSTRUCT);
    Label post_instantiation_deopt_entry, not_create_implicit_receiver;

    // Preserve the incoming parameters on the stack.
    __ SmiTag(a0);
    __ Push(cp, a0, a1);
    __ PushRoot(RootIndex::kTheHoleValue);
    __ Push(a3);

    // Derived class constructors do not create an implicit receiver.
    __ Ld(t2, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset));
    __ lwu(t2, FieldMemOperand(t2, SharedFunctionInfo::kFlagsOffset));
    __ And(t2, t2, Operand(SharedFunctionInfo::IsDerivedConstructorBit::kMask));
    __ Branch(&not_create_implicit_receiver, ne, t2, Operand(zero_reg));

    // If not derived class constructor: Allocate the new receiver object.
    __ IncrementCounter(masm->isolate()->counters()->constructed_objects(), 1,
                        t2, t3);
    __ Call(BUILTIN_CODE(masm->isolate(), FastNewObject),
            RelocInfo::CODE_TARGET);
    __ Branch(&post_instantiation_deopt_entry);

    // Else: use TheHoleValue as receiver for constructor call.
    __ bind(&not_create_implicit_receiver);
    __ LoadRoot(v0, RootIndex::kTheHoleValue);

    // Deoptimizer re-enters here after object instantiation.
    masm->isolate()->heap()->SetConstructStubCreateDeoptPCOffset(
        masm->pc_offset());
    __ bind(&post_instantiation_deopt_entry);

    // Restore new target.
    __ Pop(a3);
    // Push the allocated receiver twice: one copy may be returned, the other
    // is popped by the called function (calling convention).
    __ Push(v0, v0);

    // Restore constructor function and argument count.
    __ Ld(a1, MemOperand(fp, ConstructFrameConstants::kConstructorOffset));
    __ Ld(a0, MemOperand(fp, ConstructFrameConstants::kLengthOffset));
    __ SmiUntag(a0);

    // Set up pointer to last argument.
    __ Daddu(t2, fp, Operand(StandardFrameConstants::kCallerSPOffset));

    Label enough_stack_space, stack_overflow;
    Generate_StackOverflowCheck(masm, a0, t0, t1, &stack_overflow);
    __ Branch(&enough_stack_space);

    __ bind(&stack_overflow);
    // Restore the context from the frame.
    __ Ld(cp, MemOperand(fp, ConstructFrameConstants::kContextOffset));
    __ CallRuntime(Runtime::kThrowStackOverflow);
    // Unreachable code.
    __ break_(0xCC);

    __ bind(&enough_stack_space);

    // Copy arguments and receiver to the expression stack.
    Label loop, entry;
    __ mov(t3, a0);
    __ jmp(&entry);
    __ bind(&loop);
    __ Dlsa(t0, t2, t3, kPointerSizeLog2);
    __ Ld(t1, MemOperand(t0));
    __ push(t1);
    __ bind(&entry);
    __ Daddu(t3, t3, Operand(-1));
    __ Branch(&loop, greater_equal, t3, Operand(zero_reg));

    // Call the function.
    ParameterCount actual(a0);
    __ InvokeFunction(a1, a3, actual, CALL_FUNCTION);

    // Deoptimizer re-enters here after the constructor call.
    masm->isolate()->heap()->SetConstructStubInvokeDeoptPCOffset(
        masm->pc_offset());

    // Restore the context from the frame.
    __ Ld(cp, MemOperand(fp, ConstructFrameConstants::kContextOffset));

    // If the result is an object (in the ECMA sense), we should get rid
    // of the receiver and use the result; see ECMA-262 section 13.2.2-7.
    Label use_receiver, do_throw, leave_frame;

    // If the result is undefined, use the implicit receiver.
    __ JumpIfRoot(v0, RootIndex::kUndefinedValue, &use_receiver);

    // If the result is a smi, it is *not* an object in the ECMA sense.
    __ JumpIfSmi(v0, &use_receiver);

    // If the type of the result is below FIRST_JS_RECEIVER_TYPE, it is not an
    // object in the ECMA sense either.
    __ GetObjectType(v0, t2, t2);
    STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
    __ Branch(&leave_frame, greater_equal, t2, Operand(FIRST_JS_RECEIVER_TYPE));
    __ Branch(&use_receiver);

    __ bind(&do_throw);
    __ CallRuntime(Runtime::kThrowConstructorReturnedNonObject);

    // Throw away the constructor result and use the on-stack receiver instead.
    __ bind(&use_receiver);
    __ Ld(v0, MemOperand(sp, 0 * kPointerSize));
    __ JumpIfRoot(v0, RootIndex::kTheHoleValue, &do_throw);

    __ bind(&leave_frame);
    // Restore smi-tagged arguments count from the frame, then leave the
    // construct frame.
    __ Ld(a1, MemOperand(fp, ConstructFrameConstants::kLengthOffset));
  }
  // Remove caller arguments (plus the receiver) from the stack and return.
  __ SmiScale(a4, a1, kPointerSizeLog2);
  __ Daddu(sp, sp, a4);
  __ Daddu(sp, sp, kPointerSize);
  __ Ret();
}
void Builtins::Generate_JSBuiltinsConstructStub(MacroAssembler* masm) {
  Generate_JSBuiltinsConstructStubHelper(masm);
}
static void GetSharedFunctionInfoBytecode(MacroAssembler* masm,
                                          Register sfi_data,
                                          Register scratch1) {
  Label done;

  __ GetObjectType(sfi_data, scratch1, scratch1);
  __ Branch(&done, ne, scratch1, Operand(INTERPRETER_DATA_TYPE));
  __ Ld(sfi_data,
        FieldMemOperand(sfi_data, InterpreterData::kBytecodeArrayOffset));

  __ bind(&done);
}
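// A SharedFunctionInfo's "function data" slot holds either a BytecodeArray
// directly, or an InterpreterData wrapper (bytecode plus a per-function copy
// of the interpreter entry trampoline, used e.g. for profiling/debugging).
// Roughly (illustrative):
//   if (data->IsInterpreterData()) data = data->bytecode_array();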
// static
void Builtins::Generate_ResumeGeneratorTrampoline(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- v0 : the value to pass to the generator
  //  -- a1 : the JSGeneratorObject to resume
  //  -- ra : return address
  // -----------------------------------
  __ AssertGeneratorObject(a1);

  // Store input value into generator object.
  __ Sd(v0, FieldMemOperand(a1, JSGeneratorObject::kInputOrDebugPosOffset));
  __ RecordWriteField(a1, JSGeneratorObject::kInputOrDebugPosOffset, v0, a3,
                      kRAHasNotBeenSaved, kDontSaveFPRegs);

  // Load suspended function and context.
  __ Ld(a4, FieldMemOperand(a1, JSGeneratorObject::kFunctionOffset));
  __ Ld(cp, FieldMemOperand(a4, JSFunction::kContextOffset));

  // Flood function if we are stepping.
  Label prepare_step_in_if_stepping, prepare_step_in_suspended_generator;
  Label stepping_prepared;
  ExternalReference debug_hook =
      ExternalReference::debug_hook_on_function_call_address(masm->isolate());
  __ li(a5, debug_hook);
  __ Lb(a5, MemOperand(a5));
  __ Branch(&prepare_step_in_if_stepping, ne, a5, Operand(zero_reg));

  // Flood function if we need to continue stepping in the suspended generator.
  ExternalReference debug_suspended_generator =
      ExternalReference::debug_suspended_generator_address(masm->isolate());
  __ li(a5, debug_suspended_generator);
  __ Ld(a5, MemOperand(a5));
  __ Branch(&prepare_step_in_suspended_generator, eq, a1, Operand(a5));
  __ bind(&stepping_prepared);

  // Check the stack for overflow. We are not trying to catch interruptions
  // (i.e. debug break and preemption) here, so check the "real stack limit".
  Label stack_overflow;
  __ LoadRoot(kScratchReg, RootIndex::kRealStackLimit);
  __ Branch(&stack_overflow, lo, sp, Operand(kScratchReg));

  // Push receiver.
  __ Ld(a5, FieldMemOperand(a1, JSGeneratorObject::kReceiverOffset));
  __ Push(a5);

  // Copy the function arguments from the generator object's register file.
  __ Ld(a3, FieldMemOperand(a4, JSFunction::kSharedFunctionInfoOffset));
  __ Lhu(a3,
         FieldMemOperand(a3, SharedFunctionInfo::kFormalParameterCountOffset));
  __ Ld(t1,
        FieldMemOperand(a1, JSGeneratorObject::kParametersAndRegistersOffset));
  {
    Label done_loop, loop;
    __ Move(t2, zero_reg);
    __ bind(&loop);
    __ Dsubu(a3, a3, Operand(1));
    __ Branch(&done_loop, lt, a3, Operand(zero_reg));
    __ Dlsa(kScratchReg, t1, t2, kPointerSizeLog2);
    __ Ld(kScratchReg, FieldMemOperand(kScratchReg, FixedArray::kHeaderSize));
    __ Push(kScratchReg);
    __ Daddu(t2, t2, Operand(1));
    __ Branch(&loop);
    __ bind(&done_loop);
  }

  // Underlying function needs to have bytecode available.
  if (FLAG_debug_code) {
    __ Ld(a3, FieldMemOperand(a4, JSFunction::kSharedFunctionInfoOffset));
    __ Ld(a3, FieldMemOperand(a3, SharedFunctionInfo::kFunctionDataOffset));
    GetSharedFunctionInfoBytecode(masm, a3, a0);
    __ GetObjectType(a3, a3, a3);
    __ Assert(eq, AbortReason::kMissingBytecodeArray, a3,
              Operand(BYTECODE_ARRAY_TYPE));
  }

  // Resume (Ignition/interpreted) generator object.
  {
    __ Ld(a0, FieldMemOperand(a4, JSFunction::kSharedFunctionInfoOffset));
    __ Lhu(a0, FieldMemOperand(
                   a0, SharedFunctionInfo::kFormalParameterCountOffset));
    // We abuse new.target both to indicate that this is a resume call and to
    // pass in the generator object. In ordinary calls, new.target is always
    // undefined because generator functions are non-constructable.
    __ Move(a3, a1);
    __ Move(a1, a4);
    static_assert(kJavaScriptCallCodeStartRegister == a2, "ABI mismatch");
    __ Ld(a2, FieldMemOperand(a1, JSFunction::kCodeOffset));
    __ Daddu(a2, a2, Operand(Code::kHeaderSize - kHeapObjectTag));
    __ Jump(a2);
  }

  __ bind(&prepare_step_in_if_stepping);
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ Push(a1, a4);
    // Push hole as receiver since we do not have a receiver here.
    __ PushRoot(RootIndex::kTheHoleValue);
    __ CallRuntime(Runtime::kDebugOnFunctionCall);
    __ Pop(a1);
  }
  __ Branch(USE_DELAY_SLOT, &stepping_prepared);
  __ Ld(a4, FieldMemOperand(a1, JSGeneratorObject::kFunctionOffset));

  __ bind(&prepare_step_in_suspended_generator);
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ Push(a1);
    __ CallRuntime(Runtime::kDebugPrepareStepInSuspendedGenerator);
    __ Pop(a1);
  }
  __ Branch(USE_DELAY_SLOT, &stepping_prepared);
  __ Ld(a4, FieldMemOperand(a1, JSGeneratorObject::kFunctionOffset));

  __ bind(&stack_overflow);
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ CallRuntime(Runtime::kThrowStackOverflow);
    __ break_(0xCC);  // This should be unreachable.
  }
}
void Builtins::Generate_ConstructedNonConstructable(MacroAssembler* masm) {
  FrameScope scope(masm, StackFrame::INTERNAL);
  __ Push(a1);
  __ CallRuntime(Runtime::kThrowConstructedNonConstructable);
}
// Clobbers a2 and a7; preserves all other registers.
static void Generate_CheckStackOverflow(MacroAssembler* masm, Register argc) {
  // Check the stack for overflow. We are not trying to catch interruptions
  // (i.e. debug break and preemption) here, so check the "real stack limit".
  Label okay;
  __ LoadRoot(a2, RootIndex::kRealStackLimit);
  // Make a2 the space we have left. The stack might already be overflowed
  // here, which will cause a2 to become negative.
  __ dsubu(a2, sp, a2);
  // Check if the arguments will overflow the stack.
  __ dsll(a7, argc, kPointerSizeLog2);
  __ Branch(&okay, gt, a2, Operand(a7));  // Signed comparison.

  // Out of stack space.
  __ CallRuntime(Runtime::kThrowStackOverflow);

  __ bind(&okay);
}
static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm,
                                             bool is_construct) {
  // a0: new.target, a1: function, a2: receiver, a3: argc, s0: argv.
  {
    // Enter an internal frame.
    FrameScope scope(masm, StackFrame::INTERNAL);

    // Set up the context from the isolate's caller-context slot.
    ExternalReference context_address = ExternalReference::Create(
        IsolateAddressId::kContextAddress, masm->isolate());
    __ li(cp, context_address);
    __ Ld(cp, MemOperand(cp));

    // Push the function and the receiver onto the stack.
    __ Push(a1, a2);

    // Check if we have enough stack space to push all arguments.
    Generate_CheckStackOverflow(masm, a3);

    // Remember new.target.
    __ mov(a5, a0);

    // Copy arguments to the stack in a loop. a3: argc, s0: argv.
    Label loop, entry;
    __ Dlsa(a6, s0, a3, kPointerSizeLog2);  // a6 points past the last arg.
    __ b(&entry);
    __ bind(&loop);
    __ Ld(a4, MemOperand(s0));  // Read next parameter.
    __ daddiu(s0, s0, kPointerSize);
    __ Ld(a4, MemOperand(a4));  // Dereference handle.
    __ push(a4);                // Push parameter.
    __ bind(&entry);
    __ Branch(&loop, ne, s0, Operand(a6));

    // Set up argc (a0) and new.target (a3) for the call.
    __ mov(a0, a3);
    __ mov(a3, a5);

    // Initialize the JavaScript callee-saved registers, since they will be
    // seen by the garbage collector as part of handlers.
    __ LoadRoot(a4, RootIndex::kUndefinedValue);
    __ mov(a5, a4);
    __ mov(s1, a4);
    __ mov(s2, a4);
    __ mov(s3, a4);
    __ mov(s4, a4);
    __ mov(s5, a4);
    // s6 holds the root address; s7 is cp. Do not clobber either.

    // Invoke the code.
    Handle<Code> builtin = is_construct
                               ? BUILTIN_CODE(masm->isolate(), Construct)
                               : masm->isolate()->builtins()->Call();
    __ Call(builtin, RelocInfo::CODE_TARGET);

    // Leave the internal frame and return.
  }
  __ Jump(ra);
}
void Builtins::Generate_JSEntryTrampoline(MacroAssembler* masm) {
  Generate_JSEntryTrampolineHelper(masm, false);
}

void Builtins::Generate_JSConstructEntryTrampoline(MacroAssembler* masm) {
  Generate_JSEntryTrampolineHelper(masm, true);
}
static void ReplaceClosureCodeWithOptimizedCode(
    MacroAssembler* masm, Register optimized_code, Register closure,
    Register scratch1, Register scratch2, Register scratch3) {
  // Store the optimized code in the closure.
  __ Sd(optimized_code, FieldMemOperand(closure, JSFunction::kCodeOffset));
  __ mov(scratch1, optimized_code);  // Write barrier clobbers scratch1 below.
  __ RecordWriteField(closure, JSFunction::kCodeOffset, scratch1, scratch2,
                      kRAHasNotBeenSaved, kDontSaveFPRegs, OMIT_REMEMBERED_SET,
                      OMIT_SMI_CHECK);
}
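// The write barrier above is what makes installing optimized code safe under
// incremental marking: the store into JSFunction::kCodeOffset must be
// recorded. OMIT_SMI_CHECK is sound because a Code pointer is never a Smi,
// and OMIT_REMEMBERED_SET reflects that Code objects do not live in the new
// space, so no old-to-new entry is needed.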
static void LeaveInterpreterFrame(MacroAssembler* masm, Register scratch) {
  Register args_count = scratch;

  // Get the arguments + receiver count from the bytecode array.
  __ Ld(args_count,
        MemOperand(fp, InterpreterFrameConstants::kBytecodeArrayFromFp));
  __ Lw(args_count,
        FieldMemOperand(args_count, BytecodeArray::kParameterSizeOffset));

  // Leave the frame (also dropping the register file).
  __ LeaveFrame(StackFrame::INTERPRETED);

  // Drop receiver + arguments.
  __ Daddu(sp, sp, args_count);
}
// Tail-call |function_id| if |smi_entry| equals |marker|.
static void TailCallRuntimeIfMarkerEquals(MacroAssembler* masm,
                                          Register smi_entry,
                                          OptimizationMarker marker,
                                          Runtime::FunctionId function_id) {
  Label no_match;
  __ Branch(&no_match, ne, smi_entry, Operand(Smi::FromEnum(marker)));
  GenerateTailCallToReturnedCode(masm, function_id);
  __ bind(&no_match);
}
static void MaybeTailCallOptimizedCodeSlot(MacroAssembler* masm,
                                           Register feedback_vector,
                                           Register scratch1, Register scratch2,
                                           Register scratch3) {
  // ----------- S t a t e -------------
  //  -- a0 : argument count (preserved for callee if needed, and caller)
  //  -- a3 : new target (preserved for callee if needed, and caller)
  //  -- a1 : target function (preserved for callee if needed, and caller)
  //  -- feedback vector (preserved for caller if needed)
  // -----------------------------------
  DCHECK(
      !AreAliased(feedback_vector, a0, a1, a3, scratch1, scratch2, scratch3));

  Label optimized_code_slot_is_weak_ref, fallthrough;

  Register closure = a1;
  Register optimized_code_entry = scratch1;

  __ Ld(optimized_code_entry,
        FieldMemOperand(feedback_vector, FeedbackVector::kOptimizedCodeOffset));

  // Check if the code entry is a Smi. If yes, we interpret it as an
  // optimization marker. Otherwise, interpret it as a weak reference to a
  // code object.
  __ JumpIfNotSmi(optimized_code_entry, &optimized_code_slot_is_weak_ref);

  {
    // Optimized code slot is a Smi optimization marker.

    // Fall through if no optimization trigger.
    __ Branch(&fallthrough, eq, optimized_code_entry,
              Operand(Smi::FromEnum(OptimizationMarker::kNone)));

    TailCallRuntimeIfMarkerEquals(masm, optimized_code_entry,
                                  OptimizationMarker::kLogFirstExecution,
                                  Runtime::kFunctionFirstExecution);
    TailCallRuntimeIfMarkerEquals(masm, optimized_code_entry,
                                  OptimizationMarker::kCompileOptimized,
                                  Runtime::kCompileOptimized_NotConcurrent);
    TailCallRuntimeIfMarkerEquals(
        masm, optimized_code_entry,
        OptimizationMarker::kCompileOptimizedConcurrent,
        Runtime::kCompileOptimized_Concurrent);

    {
      // Otherwise, the marker is InOptimizationQueue, so fall through hoping
      // that the next call attempt will use the optimized code once it exists.
      if (FLAG_debug_code) {
        __ Assert(
            eq, AbortReason::kExpectedOptimizationSentinel,
            optimized_code_entry,
            Operand(Smi::FromEnum(OptimizationMarker::kInOptimizationQueue)));
      }
      __ jmp(&fallthrough);
    }
  }

  {
    // Optimized code slot is a weak reference.
    __ bind(&optimized_code_slot_is_weak_ref);

    __ LoadWeakValue(optimized_code_entry, optimized_code_entry, &fallthrough);

    // Check if the optimized code is marked for deopt. If it is, call the
    // runtime to clear it.
    Label found_deoptimized_code;
    __ Ld(a5, FieldMemOperand(optimized_code_entry,
                              Code::kCodeDataContainerOffset));
    __ Lw(a5, FieldMemOperand(a5, CodeDataContainer::kKindSpecificFlagsOffset));
    __ And(a5, a5, Operand(1 << Code::kMarkedForDeoptimizationBit));
    __ Branch(&found_deoptimized_code, ne, a5, Operand(zero_reg));

    // Optimized code is good, get it into the closure and tail call it.
    ReplaceClosureCodeWithOptimizedCode(masm, optimized_code_entry, closure,
                                        scratch2, scratch3, feedback_vector);

    static_assert(kJavaScriptCallCodeStartRegister == a2, "ABI mismatch");
    __ Daddu(a2, optimized_code_entry,
             Operand(Code::kHeaderSize - kHeapObjectTag));
    __ Jump(a2);

    // Optimized code slot contains deoptimized code, evict it and re-enter
    // the closure's code.
    __ bind(&found_deoptimized_code);
    GenerateTailCallToReturnedCode(masm, Runtime::kEvictOptimizedCodeSlot);
  }

  // Fall-through if the optimized code cell is clear and there is no
  // optimization marker.
  __ bind(&fallthrough);
}
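// Decision tree implemented above (illustrative pseudocode, not V8 API):
//   slot = feedback_vector[kOptimizedCodeOffset];
//   if (is_smi(slot)) {                    // optimization marker
//     kNone                 -> fall through to the interpreter;
//     kLogFirstExecution / kCompileOptimized / kCompileOptimizedConcurrent
//                           -> tail-call the matching runtime function;
//     kInOptimizationQueue  -> fall through (code may appear later);
//   } else {                               // weak ref to optimized Code
//     if (cleared)             fall through;
//     if (marked_for_deopt)    evict via Runtime::kEvictOptimizedCodeSlot;
//     else                     install in the closure and tail-call it;
//   }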
// Advance the current bytecode offset. This simulates what all bytecode
// handlers do upon completion of the underlying operation. Will bail out to a
// label if the bytecode (without prefix) is a return bytecode.
static void AdvanceBytecodeOffsetOrReturn(MacroAssembler* masm,
                                          Register bytecode_array,
                                          Register bytecode_offset,
                                          Register bytecode, Register scratch1,
                                          Register scratch2, Label* if_return) {
  Register bytecode_size_table = scratch1;
  DCHECK(!AreAliased(bytecode_array, bytecode_offset, bytecode_size_table,
                     bytecode));
  __ li(bytecode_size_table, ExternalReference::bytecode_size_table_address());

  // Check if the bytecode is a Wide or ExtraWide prefix bytecode.
  Label process_bytecode, extra_wide;
  STATIC_ASSERT(0 == static_cast<int>(interpreter::Bytecode::kWide));
  STATIC_ASSERT(1 == static_cast<int>(interpreter::Bytecode::kExtraWide));
  STATIC_ASSERT(2 == static_cast<int>(interpreter::Bytecode::kDebugBreakWide));
  STATIC_ASSERT(3 ==
                static_cast<int>(interpreter::Bytecode::kDebugBreakExtraWide));
  __ Branch(&process_bytecode, hi, bytecode, Operand(3));
  __ And(scratch2, bytecode, Operand(1));
  __ Branch(&extra_wide, ne, scratch2, Operand(zero_reg));

  // Load the next bytecode and update the table to the wide scaled table.
  __ Daddu(bytecode_offset, bytecode_offset, Operand(1));
  __ Daddu(scratch2, bytecode_array, bytecode_offset);
  __ Lbu(bytecode, MemOperand(scratch2));
  __ Daddu(bytecode_size_table, bytecode_size_table,
           Operand(kIntSize * interpreter::Bytecodes::kBytecodeCount));
  __ jmp(&process_bytecode);

  __ bind(&extra_wide);
  // Load the next bytecode and update the table to the extra wide scaled
  // table.
  __ Daddu(bytecode_offset, bytecode_offset, Operand(1));
  __ Daddu(scratch2, bytecode_array, bytecode_offset);
  __ Lbu(bytecode, MemOperand(scratch2));
  __ Daddu(bytecode_size_table, bytecode_size_table,
           Operand(2 * kIntSize * interpreter::Bytecodes::kBytecodeCount));

  __ bind(&process_bytecode);

// Bail out to the return label if this is a return bytecode.
#define JUMP_IF_EQUAL(NAME)          \
  __ Branch(if_return, eq, bytecode, \
            Operand(static_cast<int>(interpreter::Bytecode::k##NAME)));
  RETURN_BYTECODE_LIST(JUMP_IF_EQUAL)
#undef JUMP_IF_EQUAL

  // Otherwise, load the size of the current bytecode and advance the offset.
  __ Dlsa(scratch2, bytecode_size_table, bytecode, 2);
  __ Lw(scratch2, MemOperand(scratch2));
  __ Daddu(bytecode_offset, bytecode_offset, scratch2);
}
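// In C terms, the helper implements roughly (illustrative only):
//   uint8_t b = bytecode_array[offset];
//   const int32_t* table = bytecode_size_table;   // sizes for 1-byte operands
//   if (b <= 3) {                                 // Wide/ExtraWide prefix
//     table += ((b & 1) ? 2 : 1) * kBytecodeCount;  // scaled size tables
//     b = bytecode_array[++offset];
//   }
//   if (is_return(b)) goto if_return;
//   offset += table[b];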
// Generate code for entering a JS function with the interpreter.
// On entry to the function the receiver and arguments have been pushed on the
// stack left to right.
//
// The live registers are:
//   o a0 : actual argument count (not including the receiver)
//   o a1 : the JS function object being called
//   o a3 : the incoming new target or generator object
//   o cp : our context
//   o fp : the caller's frame pointer
//   o sp : stack pointer
//   o ra : return address
void Builtins::Generate_InterpreterEntryTrampoline(MacroAssembler* masm) {
  Register closure = a1;
  Register feedback_vector = a2;

  // Load the feedback vector from the closure.
  __ Ld(feedback_vector,
        FieldMemOperand(closure, JSFunction::kFeedbackCellOffset));
  __ Ld(feedback_vector, FieldMemOperand(feedback_vector, Cell::kValueOffset));
  // Read off the optimized code slot in the feedback vector, and if there
  // is optimized code or an optimization marker, call that instead.
  MaybeTailCallOptimizedCodeSlot(masm, feedback_vector, a4, t3, a5);

  // Open a frame scope to indicate that there is a frame on the stack. The
  // MANUAL indicates that the scope shouldn't actually generate code to set
  // up the frame (that is done below).
  FrameScope frame_scope(masm, StackFrame::MANUAL);
  __ PushStandardFrame(closure);

  // Get the bytecode array from the function object and load it into
  // kInterpreterBytecodeArrayRegister.
  __ Ld(a0, FieldMemOperand(closure, JSFunction::kSharedFunctionInfoOffset));
  __ Ld(kInterpreterBytecodeArrayRegister,
        FieldMemOperand(a0, SharedFunctionInfo::kFunctionDataOffset));
  GetSharedFunctionInfoBytecode(masm, kInterpreterBytecodeArrayRegister, a4);

  // Increment the invocation count for the function.
  __ Lw(a4, FieldMemOperand(feedback_vector,
                            FeedbackVector::kInvocationCountOffset));
  __ Addu(a4, a4, Operand(1));
  __ Sw(a4, FieldMemOperand(feedback_vector,
                            FeedbackVector::kInvocationCountOffset));

  // Check that the function data field is actually a BytecodeArray object.
  if (FLAG_debug_code) {
    __ SmiTst(kInterpreterBytecodeArrayRegister, a4);
    __ Assert(ne,
              AbortReason::kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry,
              a4, Operand(zero_reg));
    __ GetObjectType(kInterpreterBytecodeArrayRegister, a4, a4);
    __ Assert(eq,
              AbortReason::kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry,
              a4, Operand(BYTECODE_ARRAY_TYPE));
  }

  // Reset code age.
  DCHECK_EQ(0, BytecodeArray::kNoAgeBytecodeAge);
  __ sb(zero_reg, FieldMemOperand(kInterpreterBytecodeArrayRegister,
                                  BytecodeArray::kBytecodeAgeOffset));

  // Load the initial bytecode offset.
  __ li(kInterpreterBytecodeOffsetRegister,
        Operand(BytecodeArray::kHeaderSize - kHeapObjectTag));

  // Push bytecode array and Smi tagged bytecode array offset.
  __ SmiTag(a4, kInterpreterBytecodeOffsetRegister);
  __ Push(kInterpreterBytecodeArrayRegister, a4);

  // Allocate the local and temporary register file on the stack.
  {
    // Load frame size from the BytecodeArray object.
    __ Lw(a4, FieldMemOperand(kInterpreterBytecodeArrayRegister,
                              BytecodeArray::kFrameSizeOffset));

    // Do a stack check to ensure we don't go over the limit.
    Label ok;
    __ Dsubu(a5, sp, Operand(a4));
    __ LoadRoot(a2, RootIndex::kRealStackLimit);
    __ Branch(&ok, hs, a5, Operand(a2));
    __ CallRuntime(Runtime::kThrowStackOverflow);
    __ bind(&ok);

    // If ok, push undefined as the initial value for all register file
    // entries.
    Label loop_header, loop_check;
    __ LoadRoot(a5, RootIndex::kUndefinedValue);
    __ Branch(&loop_check);
    __ bind(&loop_header);
    // TODO(rmcilroy): Consider doing more than one push per loop iteration.
    __ push(a5);
    // Continue loop if not done.
    __ bind(&loop_check);
    __ Dsubu(a4, a4, Operand(kPointerSize));
    __ Branch(&loop_header, ge, a4, Operand(zero_reg));
  }

  // If the bytecode array has a valid incoming new target or generator object
  // register, initialize it with the incoming value that was passed in a3.
  Label no_incoming_new_target_or_generator_register;
  __ Lw(a5, FieldMemOperand(
                kInterpreterBytecodeArrayRegister,
                BytecodeArray::kIncomingNewTargetOrGeneratorRegisterOffset));
  __ Branch(&no_incoming_new_target_or_generator_register, eq, a5,
            Operand(zero_reg));
  __ Dlsa(a5, fp, a5, kPointerSizeLog2);
  __ Sd(a3, MemOperand(a5));
  __ bind(&no_incoming_new_target_or_generator_register);

  // Load the accumulator with undefined.
  __ LoadRoot(kInterpreterAccumulatorRegister, RootIndex::kUndefinedValue);

  // Load the dispatch table into a register and dispatch to the bytecode
  // handler at the current bytecode offset.
  Label do_dispatch;
  __ bind(&do_dispatch);
  __ li(kInterpreterDispatchTableRegister,
        ExternalReference::interpreter_dispatch_table_address(masm->isolate()));
  __ Daddu(a0, kInterpreterBytecodeArrayRegister,
           kInterpreterBytecodeOffsetRegister);
  __ Lbu(a7, MemOperand(a0));
  __ Dlsa(kScratchReg, kInterpreterDispatchTableRegister, a7, kPointerSizeLog2);
  __ Ld(kJavaScriptCallCodeStartRegister, MemOperand(kScratchReg));
  __ Call(kJavaScriptCallCodeStartRegister);
  masm->isolate()->heap()->SetInterpreterEntryReturnPCOffset(masm->pc_offset());

  // Any returns to the entry trampoline are either due to the return bytecode
  // or the interpreter tail calling a builtin and then a dispatch.

  // Get the bytecode array and bytecode offset from the stack frame.
  __ Ld(kInterpreterBytecodeArrayRegister,
        MemOperand(fp, InterpreterFrameConstants::kBytecodeArrayFromFp));
  __ Ld(kInterpreterBytecodeOffsetRegister,
        MemOperand(fp, InterpreterFrameConstants::kBytecodeOffsetFromFp));
  __ SmiUntag(kInterpreterBytecodeOffsetRegister);

  // Either return, or advance to the next bytecode and dispatch.
  Label do_return;
  __ Daddu(a1, kInterpreterBytecodeArrayRegister,
           kInterpreterBytecodeOffsetRegister);
  __ Lbu(a1, MemOperand(a1));
  AdvanceBytecodeOffsetOrReturn(masm, kInterpreterBytecodeArrayRegister,
                                kInterpreterBytecodeOffsetRegister, a1, a2, a3,
                                &do_return);
  __ SmiTag(a2, kInterpreterBytecodeOffsetRegister);
  __ Sd(a2, MemOperand(fp, InterpreterFrameConstants::kBytecodeOffsetFromFp));
  __ jmp(&do_dispatch);

  __ bind(&do_return);
  // The return value is in v0.
  LeaveInterpreterFrame(masm, t0);
  __ Jump(ra);
}
static void Generate_InterpreterPushArgs(MacroAssembler* masm,
                                         Register num_args, Register index,
                                         Register scratch, Register scratch2) {
  // Find the address of the last argument.
  __ mov(scratch2, num_args);
  __ dsll(scratch2, scratch2, kPointerSizeLog2);
  __ Dsubu(scratch2, index, Operand(scratch2));

  // Push the arguments.
  Label loop_header, loop_check;
  __ Branch(&loop_check);
  __ bind(&loop_header);
  __ Ld(scratch, MemOperand(index));
  __ Daddu(index, index, Operand(-kPointerSize));
  __ push(scratch);
  __ bind(&loop_check);
  __ Branch(&loop_header, gt, index, Operand(scratch2));
}
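// Equivalent C sketch (illustrative): the first argument sits at the highest
// address, so pushing walks downwards from `index`:
//   for (Address p = index; p > index - num_args * kPointerSize;
//        p -= kPointerSize) {
//     *--sp = *reinterpret_cast<Object**>(p);
//   }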
// static
void Builtins::Generate_InterpreterPushArgsThenCallImpl(
    MacroAssembler* masm, ConvertReceiverMode receiver_mode,
    InterpreterPushArgsMode mode) {
  DCHECK(mode != InterpreterPushArgsMode::kArrayFunction);
  // ----------- S t a t e -------------
  //  -- a0 : the number of arguments (not including the receiver)
  //  -- a2 : the address of the first argument to be pushed. Subsequent
  //          arguments should be consecutive above this, in the same order
  //          as they are to be pushed onto the stack.
  //  -- a1 : the target to call (can be any Object).
  // -----------------------------------
  Label stack_overflow;

  __ Daddu(a3, a0, Operand(1));  // Add one for the receiver.

  // Push "undefined" as the receiver arg if we need to.
  if (receiver_mode == ConvertReceiverMode::kNullOrUndefined) {
    __ PushRoot(RootIndex::kUndefinedValue);
    __ Dsubu(a3, a3, Operand(1));
  }

  Generate_StackOverflowCheck(masm, a3, a4, t0, &stack_overflow);

  // This function modifies a2, t0 and a4.
  Generate_InterpreterPushArgs(masm, a3, a2, a4, t0);

  if (mode == InterpreterPushArgsMode::kWithFinalSpread) {
    __ Pop(a2);                    // Pass the spread in a register.
    __ Dsubu(a0, a0, Operand(1));  // Subtract one for the spread.
  }

  // Call the target.
  if (mode == InterpreterPushArgsMode::kWithFinalSpread) {
    __ Jump(BUILTIN_CODE(masm->isolate(), CallWithSpread),
            RelocInfo::CODE_TARGET);
  } else {
    __ Jump(masm->isolate()->builtins()->Call(ConvertReceiverMode::kAny),
            RelocInfo::CODE_TARGET);
  }

  __ bind(&stack_overflow);
  {
    __ TailCallRuntime(Runtime::kThrowStackOverflow);
    // Unreachable code.
    __ break_(0xCC);
  }
}
// static
void Builtins::Generate_InterpreterPushArgsThenConstructImpl(
    MacroAssembler* masm, InterpreterPushArgsMode mode) {
  // ----------- S t a t e -------------
  //  -- a0 : argument count (not including receiver)
  //  -- a3 : new target
  //  -- a1 : constructor to call
  //  -- a2 : allocation site feedback if available, undefined otherwise
  //  -- a4 : address of the first argument
  // -----------------------------------
  Label stack_overflow;

  // Push a slot for the receiver to be constructed.
  __ push(zero_reg);

  Generate_StackOverflowCheck(masm, a0, a5, t0, &stack_overflow);

  // This function modifies t0, a4 and a5.
  Generate_InterpreterPushArgs(masm, a0, a4, a5, t0);

  if (mode == InterpreterPushArgsMode::kWithFinalSpread) {
    __ Pop(a2);                    // Pass the spread in a register.
    __ Dsubu(a0, a0, Operand(1));  // Subtract one for the spread.
  } else {
    __ AssertUndefinedOrAllocationSite(a2, t0);
  }

  if (mode == InterpreterPushArgsMode::kArrayFunction) {
    __ AssertFunction(a1);

    // Tail call to the array construct stub (still in the caller context at
    // this point).
    __ Jump(BUILTIN_CODE(masm->isolate(), ArrayConstructorImpl),
            RelocInfo::CODE_TARGET);
  } else if (mode == InterpreterPushArgsMode::kWithFinalSpread) {
    // Call the constructor with a0, a1, and a3 unmodified.
    __ Jump(BUILTIN_CODE(masm->isolate(), ConstructWithSpread),
            RelocInfo::CODE_TARGET);
  } else {
    DCHECK_EQ(InterpreterPushArgsMode::kOther, mode);
    // Call the constructor with a0, a1, and a3 unmodified.
    __ Jump(BUILTIN_CODE(masm->isolate(), Construct), RelocInfo::CODE_TARGET);
  }

  __ bind(&stack_overflow);
  {
    __ TailCallRuntime(Runtime::kThrowStackOverflow);
    // Unreachable code.
    __ break_(0xCC);
  }
}
static void Generate_InterpreterEnterBytecode(MacroAssembler* masm) {
  // Set the return address to the correct point in the interpreter entry
  // trampoline.
  Label builtin_trampoline, trampoline_loaded;
  Smi interpreter_entry_return_pc_offset(
      masm->isolate()->heap()->interpreter_entry_return_pc_offset());
  DCHECK_NE(interpreter_entry_return_pc_offset, Smi::zero());

  // If the SFI function_data is an InterpreterData, the function will have a
  // custom copy of the interpreter entry trampoline for profiling. If so,
  // get the custom trampoline, otherwise grab the entry address of the global
  // trampoline.
  __ Ld(t0, MemOperand(fp, StandardFrameConstants::kFunctionOffset));
  __ Ld(t0, FieldMemOperand(t0, JSFunction::kSharedFunctionInfoOffset));
  __ Ld(t0, FieldMemOperand(t0, SharedFunctionInfo::kFunctionDataOffset));
  __ GetObjectType(t0, kInterpreterDispatchTableRegister,
                   kInterpreterDispatchTableRegister);
  __ Branch(&builtin_trampoline, ne, kInterpreterDispatchTableRegister,
            Operand(INTERPRETER_DATA_TYPE));

  __ Ld(t0, FieldMemOperand(t0, InterpreterData::kInterpreterTrampolineOffset));
  __ Daddu(t0, t0, Operand(Code::kHeaderSize - kHeapObjectTag));
  __ Branch(&trampoline_loaded);

  __ bind(&builtin_trampoline);
  __ li(t0, ExternalReference::
                address_of_interpreter_entry_trampoline_instruction_start(
                    masm->isolate()));
  __ Ld(t0, MemOperand(t0));

  __ bind(&trampoline_loaded);
  __ Daddu(ra, t0, Operand(interpreter_entry_return_pc_offset->value()));

  // Initialize the dispatch table register.
  __ li(kInterpreterDispatchTableRegister,
        ExternalReference::interpreter_dispatch_table_address(masm->isolate()));

  // Get the bytecode array pointer from the frame.
  __ Ld(kInterpreterBytecodeArrayRegister,
        MemOperand(fp, InterpreterFrameConstants::kBytecodeArrayFromFp));

  if (FLAG_debug_code) {
    // Check that the function data field is actually a BytecodeArray object.
    __ SmiTst(kInterpreterBytecodeArrayRegister, kScratchReg);
    __ Assert(ne,
              AbortReason::kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry,
              kScratchReg, Operand(zero_reg));
    __ GetObjectType(kInterpreterBytecodeArrayRegister, a1, a1);
    __ Assert(eq,
              AbortReason::kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry,
              a1, Operand(BYTECODE_ARRAY_TYPE));
  }

  // Get the target bytecode offset from the frame.
  __ SmiUntag(kInterpreterBytecodeOffsetRegister,
              MemOperand(fp, InterpreterFrameConstants::kBytecodeOffsetFromFp));

  // Dispatch to the target bytecode.
  __ Daddu(a1, kInterpreterBytecodeArrayRegister,
           kInterpreterBytecodeOffsetRegister);
  __ Lbu(a7, MemOperand(a1));
  __ Dlsa(a1, kInterpreterDispatchTableRegister, a7, kPointerSizeLog2);
  __ Ld(kJavaScriptCallCodeStartRegister, MemOperand(a1));
  __ Jump(kJavaScriptCallCodeStartRegister);
}
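// Note: rather than call-and-return, this builtin *forges* a return address
// (ra = trampoline start + interpreter_entry_return_pc_offset) and then jumps
// straight to the bytecode handler. When the handler eventually returns, it
// lands in the middle of InterpreterEntryTrampoline exactly as if the
// dispatch had been a call made from there.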
void Builtins::Generate_InterpreterEnterBytecodeAdvance(MacroAssembler* masm) {
  // Advance the current bytecode offset stored within the given interpreter
  // stack frame. This simulates what all bytecode handlers do upon completion
  // of the underlying operation.
  __ Ld(kInterpreterBytecodeArrayRegister,
        MemOperand(fp, InterpreterFrameConstants::kBytecodeArrayFromFp));
  __ Ld(kInterpreterBytecodeOffsetRegister,
        MemOperand(fp, InterpreterFrameConstants::kBytecodeOffsetFromFp));
  __ SmiUntag(kInterpreterBytecodeOffsetRegister);

  // Load the current bytecode.
  __ Daddu(a1, kInterpreterBytecodeArrayRegister,
           kInterpreterBytecodeOffsetRegister);
  __ Lbu(a1, MemOperand(a1));

  // Advance to the next bytecode.
  Label if_return;
  AdvanceBytecodeOffsetOrReturn(masm, kInterpreterBytecodeArrayRegister,
                                kInterpreterBytecodeOffsetRegister, a1, a2, a3,
                                &if_return);

  // Convert the new bytecode offset to a Smi and save it in the stack frame.
  __ SmiTag(a2, kInterpreterBytecodeOffsetRegister);
  __ Sd(a2, MemOperand(fp, InterpreterFrameConstants::kBytecodeOffsetFromFp));

  Generate_InterpreterEnterBytecode(masm);

  // We should never take the if_return path.
  __ bind(&if_return);
  __ Abort(AbortReason::kInvalidBytecodeAdvance);
}
void Builtins::Generate_InterpreterEnterBytecodeDispatch(MacroAssembler* masm) {
  Generate_InterpreterEnterBytecode(masm);
}
void Builtins::Generate_InstantiateAsmJs(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- a0 : argument count (preserved for callee)
  //  -- a1 : new target (preserved for callee)
  //  -- a3 : target function (preserved for callee)
  // -----------------------------------
  Label failed;
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    // Preserve the argument count for a later compare.
    __ Move(t2, a0);
    // Push a copy of the target function and the new target, plus the
    // function again as an argument to the runtime call.
    __ SmiTag(a0);
    __ Push(a0, a1, a3, a1);

    // Copy arguments from caller (stdlib, foreign, heap).
    Label args_done;
    for (int j = 0; j < 4; ++j) {
      Label over;
      if (j < 3) {
        __ Branch(&over, ne, t2, Operand(j));
      }
      for (int i = j - 1; i >= 0; --i) {
        __ Ld(t2, MemOperand(fp, StandardFrameConstants::kCallerSPOffset +
                                     i * kPointerSize));
        __ push(t2);
      }
      for (int i = 0; i < 3 - j; ++i) {
        __ PushRoot(RootIndex::kUndefinedValue);
      }
      if (j < 3) {
        __ jmp(&args_done);
        __ bind(&over);
      }
    }
    __ bind(&args_done);

    // Call runtime; on success unwind this frame and the parent frame.
    __ CallRuntime(Runtime::kInstantiateAsmJs, 4);

    // A smi 0 is returned on failure, an object on success.
    __ JumpIfSmi(v0, &failed);

    __ Drop(2);
    __ pop(t2);
    __ SmiUntag(t2);
    scope.GenerateLeaveFrame();

    // Drop arguments and receiver, then return.
    __ Daddu(t2, t2, Operand(1));
    __ Dlsa(sp, sp, t2, kPointerSizeLog2);
    __ Ret();

    __ bind(&failed);
    // Restore target function and new target.
    __ Pop(a0, a1, a3);
    __ SmiUntag(a0);
  }
  // On failure, tail call back to regular JS by re-calling the function,
  // which has been reset to the compile-lazy builtin.
  static_assert(kJavaScriptCallCodeStartRegister == a2, "ABI mismatch");
  __ Ld(a2, FieldMemOperand(a1, JSFunction::kCodeOffset));
  __ Daddu(a2, a2, Operand(Code::kHeaderSize - kHeapObjectTag));
  __ Jump(a2);
}
namespace {
void Generate_ContinueToBuiltinHelper(MacroAssembler* masm,
                                      bool java_script_builtin,
                                      bool with_result) {
  const RegisterConfiguration* config(RegisterConfiguration::Default());
  int allocatable_register_count = config->num_allocatable_general_registers();
  if (with_result) {
    // Overwrite the hole inserted by the deoptimizer with the return value
    // from the LAZY deopt point.
    __ Sd(v0,
          MemOperand(
              sp, config->num_allocatable_general_registers() * kPointerSize +
                      BuiltinContinuationFrameConstants::kFixedFrameSize));
  }
  for (int i = allocatable_register_count - 1; i >= 0; --i) {
    int code = config->GetAllocatableGeneralCode(i);
    __ Pop(Register::from_code(code));
    if (java_script_builtin && code == kJavaScriptCallArgCountRegister.code()) {
      __ SmiUntag(Register::from_code(code));
    }
  }
  __ Ld(fp, MemOperand(
                sp, BuiltinContinuationFrameConstants::kFixedFrameSizeFromFp));
  // Pop the builtin's entry (stored below the frame), unwind the fixed frame
  // and jump to the builtin.
  __ Pop(t0);
  __ Daddu(sp, sp,
           Operand(BuiltinContinuationFrameConstants::kFixedFrameSizeFromFp));
  __ Pop(ra);
  __ Daddu(t0, t0, Operand(Code::kHeaderSize - kHeapObjectTag));
  __ Jump(t0);
}
}  // namespace
void Builtins::Generate_ContinueToCodeStubBuiltin(MacroAssembler* masm) {
  Generate_ContinueToBuiltinHelper(masm, false, false);
}

void Builtins::Generate_ContinueToCodeStubBuiltinWithResult(
    MacroAssembler* masm) {
  Generate_ContinueToBuiltinHelper(masm, false, true);
}

void Builtins::Generate_ContinueToJavaScriptBuiltin(MacroAssembler* masm) {
  Generate_ContinueToBuiltinHelper(masm, true, false);
}

void Builtins::Generate_ContinueToJavaScriptBuiltinWithResult(
    MacroAssembler* masm) {
  Generate_ContinueToBuiltinHelper(masm, true, true);
}
void Builtins::Generate_NotifyDeoptimized(MacroAssembler* masm) {
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ CallRuntime(Runtime::kNotifyDeoptimized);
  }

  DCHECK_EQ(kInterpreterAccumulatorRegister.code(), v0.code());
  __ Ld(v0, MemOperand(sp, 0 * kPointerSize));
  __ Ret(USE_DELAY_SLOT);
  // Safe to fill the delay slot: Daddu emits a single instruction.
  __ Daddu(sp, sp, Operand(1 * kPointerSize));  // In delay slot.
}
void Builtins::Generate_InterpreterOnStackReplacement(MacroAssembler* masm) {
  // Look up the function in the JavaScript frame.
  __ Ld(a0, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
  __ Ld(a0, MemOperand(a0, JavaScriptFrameConstants::kFunctionOffset));

  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    // Pass the function as an argument.
    __ push(a0);
    __ CallRuntime(Runtime::kCompileForOnStackReplacement);
  }

  // If the code object is null, just return to the caller.
  __ Ret(eq, v0, Operand(Smi::zero()));

  // Drop the handler frame that is sitting on top of the actual JavaScript
  // frame. This is the case when OSR is triggered from bytecode.
  __ LeaveFrame(StackFrame::STUB);

  // Load deoptimization data from the code object.
  // <deopt_data> = <code>[#deoptimization_data_offset]
  __ Ld(a1, MemOperand(v0, Code::kDeoptimizationDataOffset - kHeapObjectTag));

  // Load the OSR entrypoint offset from the deoptimization data.
  // <osr_offset> = <deopt_data>[#header_size + #osr_pc_offset]
  __ SmiUntag(a1, MemOperand(a1, FixedArray::OffsetOfElementAt(
                                     DeoptimizationData::kOsrPcOffsetIndex) -
                                     kHeapObjectTag));

  // Compute the target address = code_obj + header_size + osr_offset
  // <entry_addr> = <code_obj> + #header_size + <osr_offset>
  __ Daddu(v0, v0, a1);
  __ daddiu(ra, v0, Code::kHeaderSize - kHeapObjectTag);

  // And "return" to the OSR entry point of the function.
  __ Ret();
}
// static
void Builtins::Generate_FunctionPrototypeApply(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- a0     : argc
  //  -- sp[0]  : argArray
  //  -- sp[8]  : thisArg
  //  -- sp[16] : receiver
  // -----------------------------------

  Register argc = a0;
  Register arg_array = a2;
  Register receiver = a1;
  Register this_arg = a5;
  Register undefined_value = a3;
  Register scratch = a4;

  __ LoadRoot(undefined_value, RootIndex::kUndefinedValue);

  // 1. Load receiver into a1, argArray into a2 (if present), remove all
  // arguments from the stack (including the receiver), and push thisArg (if
  // present) instead.
  {
    // Claim (2 - argc) dummy arguments from the stack, to put the stack in a
    // consistent state for a simple pop operation.
    __ Dsubu(sp, sp, Operand(2 * kPointerSize));
    __ Dlsa(sp, sp, argc, kPointerSizeLog2);
    __ mov(scratch, argc);
    __ Pop(this_arg, arg_array);                   // Overwrites argc.
    __ Movz(arg_array, undefined_value, scratch);  // if argc == 0
    __ Movz(this_arg, undefined_value, scratch);   // if argc == 0
    __ Dsubu(scratch, scratch, Operand(1));
    __ Movz(arg_array, undefined_value, scratch);  // if argc == 1
    __ Ld(receiver, MemOperand(sp));
    __ Sd(this_arg, MemOperand(sp));
  }

  // 2. We don't need to check explicitly for a callable receiver here,
  // since that's the first thing the Call/CallWithArrayLike builtins will do.

  // 3. Tail call with no arguments if argArray is null or undefined.
  Label no_arguments;
  __ JumpIfRoot(arg_array, RootIndex::kNullValue, &no_arguments);
  __ Branch(&no_arguments, eq, arg_array, Operand(undefined_value));

  // 4a. Apply the receiver to the given argArray.
  __ Jump(BUILTIN_CODE(masm->isolate(), CallWithArrayLike),
          RelocInfo::CODE_TARGET);

  // 4b. The argArray is either null or undefined, so we tail call without any
  // arguments to the receiver.
  __ bind(&no_arguments);
  {
    __ mov(a0, zero_reg);
    DCHECK(receiver == a1);
    __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
  }
}
// static
void Builtins::Generate_FunctionPrototypeCall(MacroAssembler* masm) {
  // 1. Make sure we have at least one argument.
  // a0: actual number of arguments
  {
    Label done;
    __ Branch(&done, ne, a0, Operand(zero_reg));
    __ PushRoot(RootIndex::kUndefinedValue);
    __ Daddu(a0, a0, Operand(1));
    __ bind(&done);
  }

  // 2. Get the function to call (passed as receiver) from the stack.
  __ Dlsa(kScratchReg, sp, a0, kPointerSizeLog2);
  __ Ld(a1, MemOperand(kScratchReg));

  // 3. Shift arguments one slot down on the stack (overwriting the original
  //    receiver), and adjust the argument count so the original first
  //    argument becomes the new receiver.
  {
    Label loop;
    // Calculate the copy start address (destination). The copy end is sp.
    __ Dlsa(a2, sp, a0, kPointerSizeLog2);

    __ bind(&loop);
    __ Ld(kScratchReg, MemOperand(a2, -kPointerSize));
    __ Sd(kScratchReg, MemOperand(a2));
    __ Dsubu(a2, a2, Operand(kPointerSize));
    __ Branch(&loop, ne, a2, Operand(sp));
    // Adjust the actual number of arguments and remove the top element
    // (which is a copy of the last argument).
    __ Dsubu(a0, a0, Operand(1));
    __ Pop();
  }

  // 4. Call the callable.
  __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
}
void Builtins::Generate_ReflectApply(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- a0     : argc
  //  -- sp[0]  : argumentsList  (if argc == 3)
  //  -- sp[8]  : thisArgument   (if argc >= 2)
  //  -- sp[16] : target         (if argc >= 1)
  //  -- sp[24] : receiver
  // -----------------------------------

  Register argc = a0;
  Register arguments_list = a2;
  Register target = a1;
  Register this_argument = a5;
  Register undefined_value = a3;
  Register scratch = a4;

  __ LoadRoot(undefined_value, RootIndex::kUndefinedValue);

  // 1. Load target into a1 (if present), argumentsList into a2 (if present),
  // remove all arguments from the stack (including the receiver), and push
  // thisArgument (if present) instead.
  {
    // Claim (3 - argc) dummy arguments from the stack, to put the stack in a
    // consistent state for a simple pop operation.
    __ Dsubu(sp, sp, Operand(3 * kPointerSize));
    __ Dlsa(sp, sp, argc, kPointerSizeLog2);
    __ mov(scratch, argc);
    __ Pop(target, this_argument, arguments_list);
    __ Movz(arguments_list, undefined_value, scratch);  // if argc == 0
    __ Movz(this_argument, undefined_value, scratch);   // if argc == 0
    __ Movz(target, undefined_value, scratch);          // if argc == 0
    __ Dsubu(scratch, scratch, Operand(1));
    __ Movz(arguments_list, undefined_value, scratch);  // if argc == 1
    __ Movz(this_argument, undefined_value, scratch);   // if argc == 1
    __ Dsubu(scratch, scratch, Operand(1));
    __ Movz(arguments_list, undefined_value, scratch);  // if argc == 2

    __ Sd(this_argument, MemOperand(sp, 0));  // Overwrite receiver.
  }

  // 2. We don't need to check explicitly for a callable target here,
  // since that's the first thing the Call/CallWithArrayLike builtins will do.

  // 3. Apply the target to the given argumentsList.
  __ Jump(BUILTIN_CODE(masm->isolate(), CallWithArrayLike),
          RelocInfo::CODE_TARGET);
}
void Builtins::Generate_ReflectConstruct(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- a0     : argc
  //  -- sp[0]  : new.target (optional)
  //  -- sp[8]  : argumentsList
  //  -- sp[16] : target
  //  -- sp[24] : receiver
  // -----------------------------------

  Register argc = a0;
  Register arguments_list = a2;
  Register target = a1;
  Register new_target = a3;
  Register undefined_value = a4;
  Register scratch = a5;

  __ LoadRoot(undefined_value, RootIndex::kUndefinedValue);

  // 1. Load target into a1 (if present), argumentsList into a2 (if present),
  // new.target into a3 (if present, otherwise use target), remove all
  // arguments from the stack (including the receiver), and push undefined as
  // the receiver instead.
  {
    // Claim (3 - argc) dummy arguments from the stack, to put the stack in a
    // consistent state for a simple pop operation.
    __ Dsubu(sp, sp, Operand(3 * kPointerSize));
    __ Dlsa(sp, sp, argc, kPointerSizeLog2);
    __ mov(scratch, argc);
    __ Pop(target, arguments_list, new_target);
    __ Movz(arguments_list, undefined_value, scratch);  // if argc == 0
    __ Movz(new_target, undefined_value, scratch);      // if argc == 0
    __ Movz(target, undefined_value, scratch);          // if argc == 0
    __ Dsubu(scratch, scratch, Operand(1));
    __ Movz(arguments_list, undefined_value, scratch);  // if argc == 1
    __ Movz(new_target, target, scratch);               // if argc == 1
    __ Dsubu(scratch, scratch, Operand(1));
    __ Movz(new_target, target, scratch);               // if argc == 2

    __ Sd(undefined_value, MemOperand(sp, 0));  // Overwrite receiver.
  }

  // 2. We don't need to check explicitly for a constructor target here,
  // since that's the first thing the Construct/ConstructWithArrayLike
  // builtins will do.

  // 3. We don't need to check explicitly for a constructor new.target here,
  // since that's the second thing those builtins will do.

  // 4. Construct the target with the given new.target and argumentsList.
  __ Jump(BUILTIN_CODE(masm->isolate(), ConstructWithArrayLike),
          RelocInfo::CODE_TARGET);
}
static void EnterArgumentsAdaptorFrame(MacroAssembler* masm) {
  __ SmiTag(a0);
  __ li(a4, Operand(StackFrame::TypeToMarker(StackFrame::ARGUMENTS_ADAPTOR)));
  __ MultiPush(a0.bit() | a1.bit() | a4.bit() | fp.bit() | ra.bit());
  __ Push(Smi::zero());  // Padding.
  __ Daddu(fp, sp,
           Operand(ArgumentsAdaptorFrameConstants::kFixedFrameSizeFromFp));
}

static void LeaveArgumentsAdaptorFrame(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- v0 : result being passed through
  // -----------------------------------
  // Get the number of arguments passed (as a smi), tear down the frame and
  // then tear down the parameters.
  __ Ld(a1, MemOperand(fp, ArgumentsAdaptorFrameConstants::kLengthOffset));
  __ mov(sp, fp);
  __ MultiPop(fp.bit() | ra.bit());
  __ SmiScale(a4, a1, kPointerSizeLog2);
  __ Daddu(sp, sp, a4);
  // Adjust for the receiver.
  __ Daddu(sp, sp, Operand(kPointerSize));
}
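// After EnterArgumentsAdaptorFrame the frame holds, from fp downwards: the
// caller's ra/fp, the ARGUMENTS_ADAPTOR marker, the function, the smi-tagged
// argument count (read back via kLengthOffset), and one Smi padding slot to
// keep the frame 16-byte aligned. LeaveArgumentsAdaptorFrame unwinds this and
// additionally drops the actual arguments plus the receiver, in effect:
//   sp += (argc << kPointerSizeLog2) + kPointerSize;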
// static
void Builtins::Generate_CallOrConstructVarargs(MacroAssembler* masm,
                                               Handle<Code> code) {
  // ----------- S t a t e -------------
  //  -- a1 : target
  //  -- a0 : number of parameters on the stack (not including the receiver)
  //  -- a2 : arguments list (a FixedArray)
  //  -- a4 : len (number of elements to push from args)
  //  -- a3 : new.target (for [[Construct]])
  // -----------------------------------
  if (masm->emit_debug_code()) {
    // Allow a2 to be a FixedArray, or a FixedDoubleArray if a4 == 0.
    Label ok, fail;
    __ AssertNotSmi(a2);
    __ GetObjectType(a2, t8, t8);
    __ Branch(&ok, eq, t8, Operand(FIXED_ARRAY_TYPE));
    __ Branch(&fail, ne, t8, Operand(FIXED_DOUBLE_ARRAY_TYPE));
    __ Branch(&ok, eq, a4, Operand(zero_reg));
    // Fall through.
    __ bind(&fail);
    __ Abort(AbortReason::kOperandIsNotAFixedArray);

    __ bind(&ok);
  }

  Register args = a2;
  Register len = a4;

  // Check for stack overflow.
  Label stack_overflow;
  Generate_StackOverflowCheck(masm, len, kScratchReg, a5, &stack_overflow);

  // Push arguments onto the stack (thisArgument is already on the stack).
  {
    Label done, push, loop;
    Register src = a6;
    Register scratch = len;

    __ daddiu(src, args, FixedArray::kHeaderSize - kHeapObjectTag);
    __ Branch(&done, eq, len, Operand(zero_reg), i::USE_DELAY_SLOT);
    __ Daddu(a0, a0, len);  // The 'len' argument for Call() or Construct().
    __ dsll(scratch, len, kPointerSizeLog2);
    __ Dsubu(scratch, sp, Operand(scratch));
    __ LoadRoot(t1, RootIndex::kTheHoleValue);
    __ bind(&loop);
    __ Ld(a5, MemOperand(src));
    // Turn the hole into undefined as we go.
    __ Branch(&push, ne, a5, Operand(t1));
    __ LoadRoot(a5, RootIndex::kUndefinedValue);
    __ bind(&push);
    __ daddiu(src, src, kPointerSize);
    __ Push(a5);
    __ Branch(&loop, ne, scratch, Operand(sp));
    __ bind(&done);
  }

  // Tail-call to the actual Call or Construct builtin.
  __ Jump(code, RelocInfo::CODE_TARGET);

  __ bind(&stack_overflow);
  __ TailCallRuntime(Runtime::kThrowStackOverflow);
}
// static
void Builtins::Generate_CallOrConstructForwardVarargs(MacroAssembler* masm,
                                                      CallOrConstructMode mode,
                                                      Handle<Code> code) {
  // ----------- S t a t e -------------
  //  -- a0 : the number of arguments (not including the receiver)
  //  -- a3 : the new.target (for [[Construct]] calls)
  //  -- a1 : the target to call (can be any Object)
  //  -- a2 : start index (to support rest parameters)
  // -----------------------------------

  // Check if new.target has a [[Construct]] internal method.
  if (mode == CallOrConstructMode::kConstruct) {
    Label new_target_constructor, new_target_not_constructor;
    __ JumpIfSmi(a3, &new_target_not_constructor);
    __ ld(t1, FieldMemOperand(a3, HeapObject::kMapOffset));
    __ lbu(t1, FieldMemOperand(t1, Map::kBitFieldOffset));
    __ And(t1, t1, Operand(Map::IsConstructorBit::kMask));
    __ Branch(&new_target_constructor, ne, t1, Operand(zero_reg));
    __ bind(&new_target_not_constructor);
    {
      FrameScope scope(masm, StackFrame::MANUAL);
      __ EnterFrame(StackFrame::INTERNAL);
      __ Push(a3);
      __ CallRuntime(Runtime::kThrowNotConstructor);
    }
    __ bind(&new_target_constructor);
  }

  // Check if we have an arguments adaptor frame below the function frame.
  Label arguments_adaptor, arguments_done;
  __ Ld(a6, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
  __ Ld(a7, MemOperand(a6, CommonFrameConstants::kContextOrFrameTypeOffset));
  __ Branch(&arguments_adaptor, eq, a7,
            Operand(StackFrame::TypeToMarker(StackFrame::ARGUMENTS_ADAPTOR)));
  {
    // No adaptor frame: use the formal parameter count of the caller.
    __ Ld(a7, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
    __ Ld(a7, FieldMemOperand(a7, JSFunction::kSharedFunctionInfoOffset));
    __ Lhu(a7, FieldMemOperand(
                   a7, SharedFunctionInfo::kFormalParameterCountOffset));
    __ mov(a6, fp);
  }
  __ Branch(&arguments_done);
  __ bind(&arguments_adaptor);
  {
    // Just load the length from the ArgumentsAdaptorFrame.
    __ SmiUntag(a7,
                MemOperand(a6, ArgumentsAdaptorFrameConstants::kLengthOffset));
  }
  __ bind(&arguments_done);

  Label stack_done, stack_overflow;
  __ Subu(a7, a7, a2);
  __ Branch(&stack_done, le, a7, Operand(zero_reg));
  {
    // Check for stack overflow.
    Generate_StackOverflowCheck(masm, a7, a4, a5, &stack_overflow);

    // Forward the arguments from the caller frame.
    {
      Label loop;
      __ Daddu(a0, a0, a7);
      __ bind(&loop);
      {
        __ Dlsa(kScratchReg, a6, a7, kPointerSizeLog2);
        __ Ld(kScratchReg, MemOperand(kScratchReg, 1 * kPointerSize));
        __ push(kScratchReg);
        __ Subu(a7, a7, Operand(1));
        __ Branch(&loop, ne, a7, Operand(zero_reg));
      }
    }
  }
  __ Branch(&stack_done);
  __ bind(&stack_overflow);
  __ TailCallRuntime(Runtime::kThrowStackOverflow);
  __ bind(&stack_done);

  // Tail-call to the {code} handler.
  __ Jump(code, RelocInfo::CODE_TARGET);
}
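// What the forwarding loop does, in pseudo-C (illustrative): given the rest
// parameter start index in a2, re-push the tail of the caller's arguments
// for the callee:
//   int count = caller_argc - start_index;
//   for (int i = count; i > 0; --i) push(caller_frame_arg_slot[i]);
//   callee_argc += count;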
// static
void Builtins::Generate_CallFunction(MacroAssembler* masm,
                                     ConvertReceiverMode mode) {
  // ----------- S t a t e -------------
  //  -- a0 : the number of arguments (not including the receiver)
  //  -- a1 : the function to call (checked to be a JSFunction)
  // -----------------------------------
  __ AssertFunction(a1);

  // See ES6 section 9.2.1 [[Call]] ( thisArgument, argumentsList).
  // Check that function is not a "classConstructor".
  Label class_constructor;
  __ Ld(a2, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset));
  __ Lwu(a3, FieldMemOperand(a2, SharedFunctionInfo::kFlagsOffset));
  __ And(kScratchReg, a3,
         Operand(SharedFunctionInfo::IsClassConstructorBit::kMask));
  __ Branch(&class_constructor, ne, kScratchReg, Operand(zero_reg));

  // Enter the context of the function; ToObject has to run in the function
  // context, and we also need to take the global proxy from the function
  // context in case of conversion.
  __ Ld(cp, FieldMemOperand(a1, JSFunction::kContextOffset));
  // We need to convert the receiver for non-native sloppy mode functions.
  Label done_convert;
  __ Lwu(a3, FieldMemOperand(a2, SharedFunctionInfo::kFlagsOffset));
  __ And(kScratchReg, a3,
         Operand(SharedFunctionInfo::IsNativeBit::kMask |
                 SharedFunctionInfo::IsStrictBit::kMask));
  __ Branch(&done_convert, ne, kScratchReg, Operand(zero_reg));
  {
    // a0: argc, a1: function, a2: shared function info, cp: function context.
    if (mode == ConvertReceiverMode::kNullOrUndefined) {
      // Patch receiver to global proxy.
      __ LoadGlobalProxy(a3);
    } else {
      Label convert_to_object, convert_receiver;
      __ Dlsa(kScratchReg, sp, a0, kPointerSizeLog2);
      __ Ld(a3, MemOperand(kScratchReg));
      __ JumpIfSmi(a3, &convert_to_object);
      STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
      __ GetObjectType(a3, a4, a4);
      __ Branch(&done_convert, hs, a4, Operand(FIRST_JS_RECEIVER_TYPE));
      if (mode != ConvertReceiverMode::kNotNullOrUndefined) {
        Label convert_global_proxy;
        __ JumpIfRoot(a3, RootIndex::kUndefinedValue, &convert_global_proxy);
        __ JumpIfNotRoot(a3, RootIndex::kNullValue, &convert_to_object);
        __ bind(&convert_global_proxy);
        {
          // Patch receiver to global proxy.
          __ LoadGlobalProxy(a3);
        }
        __ Branch(&convert_receiver);
      }
      __ bind(&convert_to_object);
      {
        // Convert receiver using ToObject.
        FrameScope scope(masm, StackFrame::INTERNAL);
        __ SmiTag(a0);
        __ Push(a0, a1);
        __ mov(a0, a3);
        __ Push(cp);
        __ Call(BUILTIN_CODE(masm->isolate(), ToObject),
                RelocInfo::CODE_TARGET);
        __ Pop(cp);
        __ mov(a3, v0);
        __ Pop(a0, a1);
        __ SmiUntag(a0);
      }
      __ Ld(a2, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset));
      __ bind(&convert_receiver);
    }
    __ Dlsa(kScratchReg, sp, a0, kPointerSizeLog2);
    __ Sd(a3, MemOperand(kScratchReg));
  }
  __ bind(&done_convert);

  // a0: argc, a1: function, a2: shared function info, cp: function context.
  __ Lhu(a2,
         FieldMemOperand(a2, SharedFunctionInfo::kFormalParameterCountOffset));
  ParameterCount actual(a0);
  ParameterCount expected(a2);
  __ InvokeFunctionCode(a1, no_reg, expected, actual, JUMP_FUNCTION);

  // The function is a "classConstructor"; need to raise an exception.
  __ bind(&class_constructor);
  {
    FrameScope frame(masm, StackFrame::INTERNAL);
    __ Push(a1);
    __ CallRuntime(Runtime::kThrowConstructorNonCallableError);
  }
}
// static
void Builtins::Generate_CallBoundFunctionImpl(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- a0 : the number of arguments (not including the receiver)
  //  -- a1 : the function to call (checked to be a JSBoundFunction)
  // -----------------------------------
  __ AssertBoundFunction(a1);

  // Patch the receiver to [[BoundThis]].
  {
    __ Ld(kScratchReg, FieldMemOperand(a1, JSBoundFunction::kBoundThisOffset));
    __ Dlsa(a4, sp, a0, kPointerSizeLog2);
    __ Sd(kScratchReg, MemOperand(a4));
  }

  // Load [[BoundArguments]] into a2 and its length into a4.
  __ Ld(a2, FieldMemOperand(a1, JSBoundFunction::kBoundArgumentsOffset));
  __ SmiUntag(a4, FieldMemOperand(a2, FixedArray::kLengthOffset));

  // Reserve stack space for the [[BoundArguments]].
  {
    Label done;
    __ dsll(a5, a4, kPointerSizeLog2);
    __ Dsubu(sp, sp, Operand(a5));
    // Check the stack for overflow. We are not trying to catch interruptions
    // (i.e. debug break and preemption) here, so check the "real stack limit".
    __ LoadRoot(kScratchReg, RootIndex::kRealStackLimit);
    __ Branch(&done, hs, sp, Operand(kScratchReg));
    // Restore the stack pointer.
    __ Daddu(sp, sp, Operand(a5));
    {
      FrameScope scope(masm, StackFrame::MANUAL);
      __ EnterFrame(StackFrame::INTERNAL);
      __ CallRuntime(Runtime::kThrowStackOverflow);
    }
    __ bind(&done);
  }

  // Relocate arguments down the stack.
  {
    Label loop, done_loop;
    __ mov(a5, zero_reg);
    __ bind(&loop);
    __ Branch(&done_loop, gt, a5, Operand(a0));
    __ Dlsa(a6, sp, a4, kPointerSizeLog2);
    __ Ld(kScratchReg, MemOperand(a6));
    __ Dlsa(a6, sp, a5, kPointerSizeLog2);
    __ Sd(kScratchReg, MemOperand(a6));
    __ Daddu(a4, a4, Operand(1));
    __ Daddu(a5, a5, Operand(1));
    __ Branch(&loop);
    __ bind(&done_loop);
  }

  // Copy [[BoundArguments]] to the stack (below the arguments).
  {
    Label loop, done_loop;
    __ SmiUntag(a4, FieldMemOperand(a2, FixedArray::kLengthOffset));
    __ Daddu(a2, a2, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
    __ bind(&loop);
    __ Dsubu(a4, a4, Operand(1));
    __ Branch(&done_loop, lt, a4, Operand(zero_reg));
    __ Dlsa(a5, a2, a4, kPointerSizeLog2);
    __ Ld(kScratchReg, MemOperand(a5));
    __ Dlsa(a5, sp, a0, kPointerSizeLog2);
    __ Sd(kScratchReg, MemOperand(a5));
    __ Daddu(a0, a0, Operand(1));
    __ Branch(&loop);
    __ bind(&done_loop);
  }

  // Call the [[BoundTargetFunction]] via the Call builtin.
  __ Ld(a1, FieldMemOperand(a1, JSBoundFunction::kBoundTargetFunctionOffset));
  __ Jump(BUILTIN_CODE(masm->isolate(), Call_ReceiverIsAny),
          RelocInfo::CODE_TARGET);
}
// static
void Builtins::Generate_Call(MacroAssembler* masm, ConvertReceiverMode mode) {
  // ----------- S t a t e -------------
  //  -- a0 : the number of arguments (not including the receiver)
  //  -- a1 : the target to call (can be any Object)
  // -----------------------------------

  Label non_callable, non_function, non_smi;
  __ JumpIfSmi(a1, &non_callable);
  __ bind(&non_smi);
  __ GetObjectType(a1, t1, t2);
  __ Jump(masm->isolate()->builtins()->CallFunction(mode),
          RelocInfo::CODE_TARGET, eq, t2, Operand(JS_FUNCTION_TYPE));
  __ Jump(BUILTIN_CODE(masm->isolate(), CallBoundFunction),
          RelocInfo::CODE_TARGET, eq, t2, Operand(JS_BOUND_FUNCTION_TYPE));

  // Check if target has a [[Call]] internal method.
  __ Lbu(t1, FieldMemOperand(t1, Map::kBitFieldOffset));
  __ And(t1, t1, Operand(Map::IsCallableBit::kMask));
  __ Branch(&non_callable, eq, t1, Operand(zero_reg));

  __ Branch(&non_function, ne, t2, Operand(JS_PROXY_TYPE));
  __ Jump(BUILTIN_CODE(masm->isolate(), CallProxy), RelocInfo::CODE_TARGET);

  // Call to something else, which might have a [[Call]] internal method (if
  // not we raise an exception).
  __ bind(&non_function);
  // Overwrite the original receiver with the (original) target.
  __ Dlsa(kScratchReg, sp, a0, kPointerSizeLog2);
  __ Sd(a1, MemOperand(kScratchReg));
  // Let the "call_as_function_delegate" take care of the rest.
  __ LoadNativeContextSlot(Context::CALL_AS_FUNCTION_DELEGATE_INDEX, a1);
  __ Jump(masm->isolate()->builtins()->CallFunction(
              ConvertReceiverMode::kNotNullOrUndefined),
          RelocInfo::CODE_TARGET);

  // Call to something that is not callable.
  __ bind(&non_callable);
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ Push(a1);
    __ CallRuntime(Runtime::kThrowCalledNonCallable);
  }
}
// static
void Builtins::Generate_ConstructFunction(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- a0 : the number of arguments (not including the receiver)
  //  -- a1 : the constructor to call (checked to be a JSFunction)
  //  -- a3 : the new target (checked to be a constructor)
  // -----------------------------------
  __ AssertConstructor(a1);
  __ AssertFunction(a1);

  // Calling convention for function-specific construct stubs requires a2 to
  // contain either an AllocationSite or undefined.
  __ LoadRoot(a2, RootIndex::kUndefinedValue);

  Label call_generic_stub;

  // Jump to JSBuiltinsConstructStub or JSConstructStubGeneric.
  __ Ld(a4, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset));
  __ lwu(a4, FieldMemOperand(a4, SharedFunctionInfo::kFlagsOffset));
  __ And(a4, a4, Operand(SharedFunctionInfo::ConstructAsBuiltinBit::kMask));
  __ Branch(&call_generic_stub, eq, a4, Operand(zero_reg));

  __ Jump(BUILTIN_CODE(masm->isolate(), JSBuiltinsConstructStub),
          RelocInfo::CODE_TARGET);

  __ bind(&call_generic_stub);
  __ Jump(BUILTIN_CODE(masm->isolate(), JSConstructStubGeneric),
          RelocInfo::CODE_TARGET);
}
// static
void Builtins::Generate_ConstructBoundFunction(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- a0 : the number of arguments (not including the receiver)
  //  -- a1 : the function to call (checked to be a JSBoundFunction)
  //  -- a3 : the new target (checked to be a constructor)
  // -----------------------------------
  __ AssertConstructor(a1);
  __ AssertBoundFunction(a1);

  // Load [[BoundArguments]] into a2 and its length into a4.
  __ Ld(a2, FieldMemOperand(a1, JSBoundFunction::kBoundArgumentsOffset));
  __ SmiUntag(a4, FieldMemOperand(a2, FixedArray::kLengthOffset));

  // Reserve stack space for the [[BoundArguments]].
  {
    Label done;
    __ dsll(a5, a4, kPointerSizeLog2);
    __ Dsubu(sp, sp, Operand(a5));
    // Check the stack for overflow. We are not trying to catch interruptions
    // (i.e. debug break and preemption) here, so check the "real stack limit".
    __ LoadRoot(kScratchReg, RootIndex::kRealStackLimit);
    __ Branch(&done, hs, sp, Operand(kScratchReg));
    // Restore the stack pointer.
    __ Daddu(sp, sp, Operand(a5));
    {
      FrameScope scope(masm, StackFrame::MANUAL);
      __ EnterFrame(StackFrame::INTERNAL);
      __ CallRuntime(Runtime::kThrowStackOverflow);
    }
    __ bind(&done);
  }

  // Relocate arguments down the stack.
  {
    Label loop, done_loop;
    __ mov(a5, zero_reg);
    __ bind(&loop);
    __ Branch(&done_loop, ge, a5, Operand(a0));
    __ Dlsa(a6, sp, a4, kPointerSizeLog2);
    __ Ld(kScratchReg, MemOperand(a6));
    __ Dlsa(a6, sp, a5, kPointerSizeLog2);
    __ Sd(kScratchReg, MemOperand(a6));
    __ Daddu(a4, a4, Operand(1));
    __ Daddu(a5, a5, Operand(1));
    __ Branch(&loop);
    __ bind(&done_loop);
  }

  // Copy [[BoundArguments]] to the stack (below the arguments).
  {
    Label loop, done_loop;
    __ SmiUntag(a4, FieldMemOperand(a2, FixedArray::kLengthOffset));
    __ Daddu(a2, a2, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
    __ bind(&loop);
    __ Dsubu(a4, a4, Operand(1));
    __ Branch(&done_loop, lt, a4, Operand(zero_reg));
    __ Dlsa(a5, a2, a4, kPointerSizeLog2);
    __ Ld(kScratchReg, MemOperand(a5));
    __ Dlsa(a5, sp, a0, kPointerSizeLog2);
    __ Sd(kScratchReg, MemOperand(a5));
    __ Daddu(a0, a0, Operand(1));
    __ Branch(&loop);
    __ bind(&done_loop);
  }

  // Patch new.target to [[BoundTargetFunction]] if new.target equals target.
  {
    Label skip_load;
    __ Branch(&skip_load, ne, a1, Operand(a3));
    __ Ld(a3, FieldMemOperand(a1, JSBoundFunction::kBoundTargetFunctionOffset));
    __ bind(&skip_load);
  }

  // Construct the [[BoundTargetFunction]] via the Construct builtin.
  __ Ld(a1, FieldMemOperand(a1, JSBoundFunction::kBoundTargetFunctionOffset));
  __ Jump(BUILTIN_CODE(masm->isolate(), Construct), RelocInfo::CODE_TARGET);
}
// static
void Builtins::Generate_Construct(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- a0 : the number of arguments (not including the receiver)
  //  -- a1 : the constructor to call (can be any Object)
  //  -- a3 : the new target (either the same as the constructor or
  //          the JSFunction on which new was invoked initially)
  // -----------------------------------

  // Check if target is a Smi.
  Label non_constructor, non_proxy;
  __ JumpIfSmi(a1, &non_constructor);

  // Check if target has a [[Construct]] internal method.
  __ ld(t1, FieldMemOperand(a1, HeapObject::kMapOffset));
  __ Lbu(t3, FieldMemOperand(t1, Map::kBitFieldOffset));
  __ And(t3, t3, Operand(Map::IsConstructorBit::kMask));
  __ Branch(&non_constructor, eq, t3, Operand(zero_reg));

  // Dispatch based on instance type.
  __ Lhu(t2, FieldMemOperand(t1, Map::kInstanceTypeOffset));
  __ Jump(BUILTIN_CODE(masm->isolate(), ConstructFunction),
          RelocInfo::CODE_TARGET, eq, t2, Operand(JS_FUNCTION_TYPE));

  // Only dispatch to bound functions after checking whether they are
  // constructors.
  __ Jump(BUILTIN_CODE(masm->isolate(), ConstructBoundFunction),
          RelocInfo::CODE_TARGET, eq, t2, Operand(JS_BOUND_FUNCTION_TYPE));

  // Only dispatch to proxies after checking whether they are constructors.
  __ Branch(&non_proxy, ne, t2, Operand(JS_PROXY_TYPE));
  __ Jump(BUILTIN_CODE(masm->isolate(), ConstructProxy),
          RelocInfo::CODE_TARGET);

  // Called Construct on an exotic Object with a [[Construct]] internal method.
  __ bind(&non_proxy);
  {
    // Overwrite the original receiver with the (original) target.
    __ Dlsa(kScratchReg, sp, a0, kPointerSizeLog2);
    __ Sd(a1, MemOperand(kScratchReg));
    // Let the "call_as_constructor_delegate" take care of the rest.
    __ LoadNativeContextSlot(Context::CALL_AS_CONSTRUCTOR_DELEGATE_INDEX, a1);
    __ Jump(masm->isolate()->builtins()->CallFunction(),
            RelocInfo::CODE_TARGET);
  }

  // Called Construct on an Object that doesn't have a [[Construct]] internal
  // method.
  __ bind(&non_constructor);
  __ Jump(BUILTIN_CODE(masm->isolate(), ConstructedNonConstructable),
          RelocInfo::CODE_TARGET);
}
void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
  // State setup as expected by MacroAssembler::InvokePrologue.
  // ----------- S t a t e -------------
  //  -- a0: actual arguments count
  //  -- a1: function (passed through to callee)
  //  -- a2: expected arguments count
  //  -- a3: new target (passed through to callee)
  // -----------------------------------

  Label invoke, dont_adapt_arguments, stack_overflow;

  Label enough, too_few;
  __ Branch(&dont_adapt_arguments, eq, a2,
            Operand(SharedFunctionInfo::kDontAdaptArgumentsSentinel));
  // We use Uless, as the number of arguments is always greater than 0.
  __ Branch(&too_few, Uless, a0, Operand(a2));

  {  // Enough parameters: actual >= expected.
    __ bind(&enough);
    EnterArgumentsAdaptorFrame(masm);
    Generate_StackOverflowCheck(masm, a2, a5, kScratchReg, &stack_overflow);

    // Calculate the copy start address (a0) and copy end address (a4).
    __ SmiScale(a0, a0, kPointerSizeLog2);
    __ Daddu(a0, fp, a0);
    // Adjust for return address and receiver.
    __ Daddu(a0, a0, Operand(2 * kPointerSize));
    // Compute copy end address.
    __ dsll(a4, a2, kPointerSizeLog2);
    __ dsubu(a4, a0, a4);

    // Copy the arguments (including the receiver) to the new stack frame.
    Label copy;
    __ bind(&copy);
    __ Ld(a5, MemOperand(a0));
    __ push(a5);
    __ Branch(USE_DELAY_SLOT, &copy, ne, a0, Operand(a4));
    __ daddiu(a0, a0, -kPointerSize);  // In delay slot.

    __ jmp(&invoke);
  }

  {  // Too few parameters: actual < expected.
    __ bind(&too_few);
    EnterArgumentsAdaptorFrame(masm);
    Generate_StackOverflowCheck(masm, a2, a5, kScratchReg, &stack_overflow);

    // Calculate the copy start address (a0) and copy end address (a7).
    __ SmiScale(a0, a0, kPointerSizeLog2);
    __ Daddu(a0, fp, a0);
    // Adjust for return address and receiver.
    __ Daddu(a0, a0, Operand(2 * kPointerSize));
    // Compute copy end address. Also adjust for return address.
    __ Daddu(a7, fp, kPointerSize);

    // Copy the arguments (including the receiver) to the new stack frame.
    Label copy;
    __ bind(&copy);
    __ Ld(a4, MemOperand(a0));  // Adjusted above for return addr and receiver.
    __ Dsubu(sp, sp, kPointerSize);
    __ Dsubu(a0, a0, kPointerSize);
    __ Branch(USE_DELAY_SLOT, &copy, ne, a0, Operand(a7));
    __ Sd(a4, MemOperand(sp));  // In the delay slot.

    // Fill the remaining expected arguments with undefined.
    __ LoadRoot(a5, RootIndex::kUndefinedValue);
    __ dsll(a6, a2, kPointerSizeLog2);
    __ Dsubu(a4, fp, Operand(a6));
    // Adjust for the frame.
    __ Dsubu(a4, a4,
             Operand(ArgumentsAdaptorFrameConstants::kFixedFrameSizeFromFp +
                     kPointerSize));

    Label fill;
    __ bind(&fill);
    __ Dsubu(sp, sp, kPointerSize);
    __ Branch(USE_DELAY_SLOT, &fill, ne, sp, Operand(a4));
    __ Sd(a5, MemOperand(sp));  // In the delay slot.
  }

  // Call the entry point.
  __ bind(&invoke);
  __ mov(a0, a2);
  // a0: expected number of arguments
  // a1: function (passed through to callee)
  // a3: new target (passed through to callee)
  static_assert(kJavaScriptCallCodeStartRegister == a2, "ABI mismatch");
  __ Ld(a2, FieldMemOperand(a1, JSFunction::kCodeOffset));
  __ Daddu(a2, a2, Operand(Code::kHeaderSize - kHeapObjectTag));
  __ Call(a2);

  // Store offset of return address for deoptimizer.
  masm->isolate()->heap()->SetArgumentsAdaptorDeoptPCOffset(masm->pc_offset());

  // Exit frame and return.
  LeaveArgumentsAdaptorFrame(masm);
  __ Ret();

  // -------------------------------------------
  // Don't adapt arguments.
  // -------------------------------------------
  __ bind(&dont_adapt_arguments);
  static_assert(kJavaScriptCallCodeStartRegister == a2, "ABI mismatch");
  __ Ld(a2, FieldMemOperand(a1, JSFunction::kCodeOffset));
  __ Daddu(a2, a2, Operand(Code::kHeaderSize - kHeapObjectTag));
  __ Jump(a2);

  __ bind(&stack_overflow);
  {
    FrameScope frame(masm, StackFrame::MANUAL);
    __ CallRuntime(Runtime::kThrowStackOverflow);
    __ break_(0xCC);
  }
}
void Builtins::Generate_WasmCompileLazy(MacroAssembler* masm) {
  // The function index was put in t0 by the jump table trampoline.
  // Convert it to a Smi for the runtime call.
  __ SmiTag(kWasmCompileLazyFuncIndexRegister);
  {
    HardAbortScope hard_abort(masm);  // Avoid calls to Abort.
    FrameScope scope(masm, StackFrame::WASM_COMPILE_LAZY);

    // Save all parameter registers (see wasm-linkage.cc); they might be
    // overwritten in the runtime call below. We don't have any callee-saved
    // registers in wasm, so no need to store anything else.
    constexpr RegList gp_regs =
        Register::ListOf<a0, a1, a2, a3, a4, a5, a6, a7>();
    constexpr RegList fp_regs =
        DoubleRegister::ListOf<f2, f4, f6, f8, f10, f12, f14>();
    __ MultiPush(gp_regs);
    __ MultiPushFPU(fp_regs);

    // Pass the instance and the function index as explicit arguments to the
    // runtime function.
    __ Push(kWasmInstanceRegister, kWasmCompileLazyFuncIndexRegister);
    // Load the correct CEntry builtin from the instance object.
    __ Ld(a2, FieldMemOperand(kWasmInstanceRegister,
                              WasmInstanceObject::kCEntryStubOffset));
    // Initialize the JavaScript context with 0. CEntry will use it to set the
    // current context on the isolate.
    __ Move(kContextRegister, Smi::zero());
    __ CallRuntimeWithCEntry(Runtime::kWasmCompileLazy, a2);

    // Restore registers.
    __ MultiPopFPU(fp_regs);
    __ MultiPop(gp_regs);
  }
  // Finally, jump to the entrypoint.
  __ Jump(kScratchReg, v0, 0);
}
void Builtins::Generate_CEntry(MacroAssembler* masm, int result_size,
                               SaveFPRegsMode save_doubles, ArgvMode argv_mode,
                               bool builtin_exit_frame) {
  // Called from JavaScript; parameters are on the stack as if calling a JS
  // function.
  // a0: number of arguments including receiver
  // a1: pointer to the builtin function
  // fp: frame pointer    (restored after C call)
  // sp: stack pointer    (restored as callee's sp after C call)
  // cp: current context  (C callee-saved)
  //
  // If argv_mode == kArgvInRegister:
  // a2: pointer to the first argument

  if (argv_mode == kArgvInRegister) {
    // Move argv into a callee-saved register.
    __ mov(s1, a2);
  } else {
    // Compute the argv pointer in a callee-saved register.
    __ Dlsa(s1, sp, a0, kPointerSizeLog2);
    __ Dsubu(s1, s1, kPointerSize);
  }

  // Enter the exit frame that transitions from JavaScript to C++.
  FrameScope scope(masm, StackFrame::MANUAL);
  __ EnterExitFrame(
      save_doubles == kSaveFPRegs, 0,
      builtin_exit_frame ? StackFrame::BUILTIN_EXIT : StackFrame::EXIT);

  // Keep argc and the builtin pointer in C callee-saved registers.
  __ mov(s0, a0);
  __ mov(s2, a1);

  // We are calling compiled C/C++ code; we also need to reserve the four
  // argument slots on the stack.

  __ AssertStackIsAligned();

  // a0 = argc, a1 = argv, a2 = isolate
  __ li(a2, ExternalReference::isolate_address(masm->isolate()));
  __ mov(a1, s1);

  // To let the GC traverse the return address of the exit frame, we need to
  // know where the return address is. CEntry is unmovable, so we can store
  // the address on the stack and find it again; it never changes.
  {
    Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm);
    int kNumInstructionsToJump = 4;
    Label find_ra;
    // Adjust the value in ra to point to the correct return location, two
    // instructions past the real call into C code (the jalr(t9)).
    if (kArchVariant >= kMips64r6) {
      __ addiupc(ra, kNumInstructionsToJump + 1);
    } else {
      // Pre-r6: nal stores PC + 8 in ra, without transferring control.
      __ nal();  // nal has a branch delay slot.
      __ Daddu(ra, ra, kNumInstructionsToJump * kInstrSize);
    }
    __ bind(&find_ra);

    // This spot was reserved in EnterExitFrame.
    __ Sd(ra, MemOperand(sp));
    // The stack space reservation is done in the branch delay slot below;
    // the stack is still aligned here.

    // Call the C routine.
    __ mov(t9, s2);  // Function pointer to t9 to conform to the ABI for PIC.
    __ jalr(t9);
    // Set up sp in the delay slot.
    __ daddiu(sp, sp, -kCArgsSlotsSize);
    // Make sure the stored 'ra' points to this position.
    DCHECK_EQ(kNumInstructionsToJump,
              masm->InstructionsGeneratedSince(&find_ra));
  }

  // Result returned in v0 or v1:v0 - do not destroy these registers!

  // Check result for the exception sentinel.
  Label exception_returned;
  __ LoadRoot(a4, RootIndex::kException);
  __ Branch(&exception_returned, eq, a4, Operand(v0));

  // Check that there is no pending exception; otherwise we should have
  // returned the exception sentinel.
  if (FLAG_debug_code) {
    Label okay;
    ExternalReference pending_exception_address = ExternalReference::Create(
        IsolateAddressId::kPendingExceptionAddress, masm->isolate());
    __ li(a2, pending_exception_address);
    __ Ld(a2, MemOperand(a2));
    __ LoadRoot(a4, RootIndex::kTheHoleValue);
    // Cannot use Check here, as it attempts to generate a call into the
    // runtime.
    __ Branch(&okay, eq, a4, Operand(a2));
    __ stop("Unexpected pending exception");
    __ bind(&okay);
  }

  // Exit the C frame and return.
  Register argc = argv_mode == kArgvInRegister
                      // We don't want to pop arguments, so set argc to no_reg.
                      ? no_reg
                      // s0 still holds argc (C callee-saved).
                      : s0;
  __ LeaveExitFrame(save_doubles == kSaveFPRegs, argc, EMIT_RETURN);

  // Handling of exceptions.
  __ bind(&exception_returned);

  ExternalReference pending_handler_context_address = ExternalReference::Create(
      IsolateAddressId::kPendingHandlerContextAddress, masm->isolate());
  ExternalReference pending_handler_entrypoint_address =
      ExternalReference::Create(
          IsolateAddressId::kPendingHandlerEntrypointAddress, masm->isolate());
  ExternalReference pending_handler_fp_address = ExternalReference::Create(
      IsolateAddressId::kPendingHandlerFPAddress, masm->isolate());
  ExternalReference pending_handler_sp_address = ExternalReference::Create(
      IsolateAddressId::kPendingHandlerSPAddress, masm->isolate());

  // Ask the runtime for help to determine the handler. This will set v0 to
  // the current pending exception; do not clobber it.
  ExternalReference find_handler =
      ExternalReference::Create(Runtime::kUnwindAndFindExceptionHandler);
  {
    FrameScope scope(masm, StackFrame::MANUAL);
    __ PrepareCallCFunction(3, 0, a0);
    __ mov(a0, zero_reg);
    __ mov(a1, zero_reg);
    __ li(a2, ExternalReference::isolate_address(masm->isolate()));
    __ CallCFunction(find_handler, 3);
  }

  // Retrieve the handler context, SP and FP.
  __ li(cp, pending_handler_context_address);
  __ Ld(cp, MemOperand(cp));
  __ li(sp, pending_handler_sp_address);
  __ Ld(sp, MemOperand(sp));
  __ li(fp, pending_handler_fp_address);
  __ Ld(fp, MemOperand(fp));

  // If the handler is a JS frame, restore the context to the frame. Note that
  // the context will be set to (cp == 0) for non-JS frames.
  Label zero;
  __ Branch(&zero, eq, cp, Operand(zero_reg));
  __ Sd(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
  __ bind(&zero);

  // Reset the masking register. This is done independent of the underlying
  // feature flag to keep the snapshot valid for both configurations; the
  // register is caller-saved, so this is always safe.
  __ ResetSpeculationPoisonRegister();

  // Compute the handler entry address and jump to it.
  __ li(t9, pending_handler_entrypoint_address);
  __ Ld(t9, MemOperand(t9));
  __ Jump(t9);
}
void Builtins::Generate_DoubleToI(MacroAssembler* masm) {
  Label out_of_range, only_low, negate, done;
  Register result_reg = t0;

  Register scratch = GetRegisterThatIsNotOneOf(result_reg);
  Register scratch2 = GetRegisterThatIsNotOneOf(result_reg, scratch);
  Register scratch3 = GetRegisterThatIsNotOneOf(result_reg, scratch, scratch2);
  DoubleRegister double_scratch = kScratchDoubleReg;

  // Account for the saved registers below.
  const int kArgumentOffset = 4 * kPointerSize;

  __ Push(result_reg);
  __ Push(scratch, scratch2, scratch3);

  // Load the double input.
  __ Ldc1(double_scratch, MemOperand(sp, kArgumentOffset));

  // Clear the cumulative exception flags and save the FCSR.
  __ cfc1(scratch2, FCSR);
  __ ctc1(zero_reg, FCSR);

  // Try a conversion to a signed integer.
  __ Trunc_w_d(double_scratch, double_scratch);
  // Move the converted value into the result register.
  __ mfc1(scratch3, double_scratch);

  // Retrieve and restore the FCSR.
  __ cfc1(scratch, FCSR);
  __ ctc1(scratch2, FCSR);

  // Check for overflow and NaNs.
  __ And(scratch, scratch,
         kFCSROverflowFlagMask | kFCSRUnderflowFlagMask |
             kFCSRInvalidOpFlagMask);
  // If we had no exceptions, set result_reg and we are done.
  Label error;
  __ Branch(&error, ne, scratch, Operand(zero_reg));
  __ Move(result_reg, scratch3);
  __ Branch(&done);
  __ bind(&error);

  // Load the double value as raw words and perform a manual truncation.
  Register input_high = scratch2;
  Register input_low = scratch3;

  __ Lw(input_low, MemOperand(sp, kArgumentOffset + Register::kMantissaOffset));
  __ Lw(input_high,
        MemOperand(sp, kArgumentOffset + Register::kExponentOffset));

  Label normal_exponent, restore_sign;
  // Extract the biased exponent in result.
  __ Ext(result_reg, input_high, HeapNumber::kExponentShift,
         HeapNumber::kExponentBits);

  // Check for Infinity and NaNs, which should return 0.
  __ Subu(scratch, result_reg, HeapNumber::kExponentMask);
  __ Movz(result_reg, zero_reg, scratch);
  __ Branch(&done, eq, scratch, Operand(zero_reg));

  // Express the exponent as a delta to (number of mantissa bits + 31).
  __ Subu(result_reg, result_reg,
          Operand(HeapNumber::kExponentBias + HeapNumber::kMantissaBits + 31));

  // If the delta is strictly positive, all bits would be shifted away, which
  // means that we can return 0.
  __ Branch(&normal_exponent, le, result_reg, Operand(zero_reg));
  __ mov(result_reg, zero_reg);
  __ Branch(&done);

  __ bind(&normal_exponent);
  const int kShiftBase = HeapNumber::kNonMantissaBitsInTopWord - 1;
  // Calculate the shift.
  __ Addu(scratch, result_reg, Operand(kShiftBase + HeapNumber::kMantissaBits));

  // Save the sign.
  Register sign = result_reg;
  result_reg = no_reg;
  __ And(sign, input_high, Operand(HeapNumber::kSignMask));

  // On ARM, shifts > 31 bits are valid and will result in zero. On MIPS we
  // need to check for this specific case.
  Label high_shift_needed, high_shift_done;
  __ Branch(&high_shift_needed, lt, scratch, Operand(32));
  __ mov(input_high, zero_reg);
  __ Branch(&high_shift_done);
  __ bind(&high_shift_needed);

  // Set the implicit 1 before the mantissa part in input_high.
  __ Or(input_high, input_high,
        Operand(1 << HeapNumber::kMantissaBitsInTopWord));
  // Shift the mantissa bits to the correct position. We don't need to clear
  // the non-mantissa bits as they will be shifted away. If they weren't, it
  // would mean that the answer is in the 32-bit range.
  __ sllv(input_high, input_high, scratch);

  __ bind(&high_shift_done);

  // Replace the shifted bits with bits from the lower mantissa word.
  Label pos_shift, shift_done;
  __ li(kScratchReg, 32);
  __ subu(scratch, kScratchReg, scratch);
  __ Branch(&pos_shift, ge, scratch, Operand(zero_reg));

  // Negate scratch.
  __ Subu(scratch, zero_reg, scratch);
  __ sllv(input_low, input_low, scratch);
  __ Branch(&shift_done);

  __ bind(&pos_shift);
  __ srlv(input_low, input_low, scratch);

  __ bind(&shift_done);
  __ Or(input_high, input_high, Operand(input_low));
  // Restore the sign if necessary.
  __ mov(scratch, sign);
  result_reg = sign;
  sign = no_reg;
  __ Subu(result_reg, zero_reg, input_high);
  __ Movz(result_reg, input_high, scratch);

  __ bind(&done);

  __ Sd(result_reg, MemOperand(sp, kArgumentOffset));
  __ Pop(scratch, scratch2, scratch3);
  __ Pop(result_reg);
  __ Ret();
}
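// The slow path above implements modulo-2^32 truncation (ECMAScript ToInt32
// semantics) for doubles the FPU cannot convert (trunc.w.d raises
// Invalid/Overflow). In effect (illustrative C, not part of the builtin):
//   uint64_t bits = bit_cast<uint64_t>(x);
//   int exp = int((bits >> 52) & 0x7FF) - 1023;           // unbiased exponent
//   uint64_t mant = (bits & ((1ull << 52) - 1)) | (1ull << 52);
//   int shift = exp - 52;  // mantissa is 53 bits including the implicit 1
//   uint32_t result;
//   if (exp < 0 || shift >= 32) result = 0;               // all bits lost
//   else if (shift >= 0)        result = uint32_t(mant << shift);
//   else                        result = uint32_t(mant >> -shift);
//   if (bits >> 63) result = uint32_t(-int64_t(result));  // apply sign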
void Builtins::Generate_MathPowInternal(MacroAssembler* masm) {
  const Register exponent = a2;
  const DoubleRegister double_base = f2;
  const DoubleRegister double_exponent = f4;
  const DoubleRegister double_result = f0;
  const DoubleRegister double_scratch = f6;
  const FPURegister single_scratch = f8;
  const Register scratch = t1;
  const Register scratch2 = a7;

  Label call_runtime, done, int_exponent;

  Label int_exponent_convert;
  // Detect integer exponents stored as doubles.
  __ EmitFPUTruncate(kRoundToMinusInf, scratch, double_exponent, kScratchReg,
                     double_scratch, scratch2, kCheckForInexactConversion);
  // scratch2 == 0 means there was no conversion error.
  __ Branch(&int_exponent_convert, eq, scratch2, Operand(zero_reg));

  __ push(ra);
  {
    AllowExternalCallThatCantCauseGC scope(masm);
    __ PrepareCallCFunction(0, 2, scratch2);
    __ MovToFloatParameters(double_base, double_exponent);
    __ CallCFunction(ExternalReference::power_double_double_function(), 0, 2);
  }
  __ pop(ra);
  __ MovFromFloatResult(double_result);
  __ jmp(&done);

  __ bind(&int_exponent_convert);

  // Calculate power with integer exponent.
  __ bind(&int_exponent);

  // Get two copies of the exponent in the registers scratch and exponent.
  // The exponent has previously been stored into scratch as an untagged
  // integer.
  __ mov(exponent, scratch);

  __ mov_d(double_scratch, double_base);  // Back up base.
  __ Move(double_result, 1.0);

  // Get the absolute value of the exponent.
  Label positive_exponent, bail_out;
  __ Branch(&positive_exponent, ge, scratch, Operand(zero_reg));
  __ Dsubu(scratch, zero_reg, scratch);
  // Check when Dsubu overflows and we get a negative result (happens only
  // when the input is MIN_INT).
  __ Branch(&bail_out, gt, zero_reg, Operand(scratch));
  __ bind(&positive_exponent);
  __ Assert(ge, AbortReason::kUnexpectedNegativeValue, scratch,
            Operand(zero_reg));

  Label while_true, no_carry, loop_end;
  __ bind(&while_true);

  __ And(scratch2, scratch, 1);

  __ Branch(&no_carry, eq, scratch2, Operand(zero_reg));
  __ mul_d(double_result, double_result, double_scratch);
  __ bind(&no_carry);

  __ dsra(scratch, scratch, 1);

  __ Branch(&loop_end, eq, scratch, Operand(zero_reg));
  __ mul_d(double_scratch, double_scratch, double_scratch);

  __ Branch(&while_true);

  __ bind(&loop_end);

  __ Branch(&done, ge, exponent, Operand(zero_reg));
  __ Move(double_scratch, 1.0);
  __ div_d(double_result, double_scratch, double_result);

  // Test whether the result is zero. Bail out to check for a subnormal
  // result: due to subnormals, x^-y == (1/x)^y does not hold in all cases.
  __ CompareF64(EQ, double_result, kDoubleRegZero);
  __ BranchFalseShortF(&done);

  // double_exponent may not contain the exponent value if the input was a
  // smi. We set it with the exponent value before bailing out.
  __ bind(&bail_out);
  __ mtc1(exponent, single_scratch);
  __ cvt_d_w(double_exponent, single_scratch);

  // Call the C implementation of pow for the slow cases.
  __ push(ra);
  {
    AllowExternalCallThatCantCauseGC scope(masm);
    __ PrepareCallCFunction(0, 2, scratch);
    __ MovToFloatParameters(double_base, double_exponent);
    __ CallCFunction(ExternalReference::power_double_double_function(), 0, 2);
  }
  __ pop(ra);
  __ MovFromFloatResult(double_result);

  __ bind(&done);
  __ Ret();
}
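// The integer-exponent fast path above is binary exponentiation
// (square-and-multiply). Illustrative C equivalent:
//   double base = x, result = 1.0;
//   for (unsigned n = abs_exponent; n != 0; n >>= 1) {
//     if (n & 1) result *= base;    // odd bit: multiply into the result
//     base *= base;                 // square for the next bit
//   }
//   if (exponent < 0) result = 1.0 / result;
//   // A zero result here may hide a subnormal that 1/x lost; in that case
//   // fall back to the C library pow() path above.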
namespace {

void GenerateInternalArrayConstructorCase(MacroAssembler* masm,
                                          ElementsKind kind) {
  __ LoadRoot(kJavaScriptCallExtraArg1Register, RootIndex::kUndefinedValue);

  __ Jump(CodeFactory::InternalArrayNoArgumentConstructor(masm->isolate(), kind)
              .code(),
          RelocInfo::CODE_TARGET, lo, a0, Operand(1));

  __ Jump(BUILTIN_CODE(masm->isolate(), ArrayNArgumentsConstructor),
          RelocInfo::CODE_TARGET, hi, a0, Operand(1));

  if (IsFastPackedElementsKind(kind)) {
    // We might need to create a holey array; look at the first argument.
    __ Ld(kScratchReg, MemOperand(sp, 0));

    __ Jump(CodeFactory::InternalArraySingleArgumentConstructor(
                masm->isolate(), GetHoleyElementsKind(kind))
                .code(),
            RelocInfo::CODE_TARGET, ne, kScratchReg, Operand(zero_reg));
  }

  __ Jump(
      CodeFactory::InternalArraySingleArgumentConstructor(masm->isolate(), kind)
          .code(),
      RelocInfo::CODE_TARGET);
}

}  // namespace
void Builtins::Generate_InternalArrayConstructorImpl(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- a0     : argc
  //  -- a1     : constructor
  //  -- sp[...]: constructor arguments
  // -----------------------------------

  if (FLAG_debug_code) {
    // The array construct code is only set for the global and natives builtin
    // Array functions, which always have maps.

    // The initial map for the builtin Array function should be a map.
    __ Ld(a3, FieldMemOperand(a1, JSFunction::kPrototypeOrInitialMapOffset));
    // Will both indicate a nullptr and a Smi.
    __ SmiTst(a3, kScratchReg);
    __ Assert(ne, AbortReason::kUnexpectedInitialMapForArrayFunction,
              kScratchReg, Operand(zero_reg));
    __ GetObjectType(a3, a3, a4);
    __ Assert(eq, AbortReason::kUnexpectedInitialMapForArrayFunction, a4,
              Operand(MAP_TYPE));
  }

  // Figure out the right elements kind.
  __ Ld(a3, FieldMemOperand(a1, JSFunction::kPrototypeOrInitialMapOffset));

  // Load the map's "bit field 2" into a3. We only need the first byte, but
  // the following bit field extraction takes care of that anyway.
  __ Lbu(a3, FieldMemOperand(a3, Map::kBitField2Offset));
  // Retrieve elements_kind from bit field 2.
  __ DecodeField<Map::ElementsKindBits>(a3);

  if (FLAG_debug_code) {
    Label done;
    __ Branch(&done, eq, a3, Operand(PACKED_ELEMENTS));
    __ Assert(
        eq,
        AbortReason::kInvalidElementsKindForInternalArrayOrInternalPackedArray,
        a3, Operand(HOLEY_ELEMENTS));
    __ bind(&done);
  }

  Label fast_elements_case;
  __ Branch(&fast_elements_case, eq, a3, Operand(PACKED_ELEMENTS));
  GenerateInternalArrayConstructorCase(masm, HOLEY_ELEMENTS);

  __ bind(&fast_elements_case);
  GenerateInternalArrayConstructorCase(masm, PACKED_ELEMENTS);
}
#undef __

}  // namespace internal
}  // namespace v8

#endif  // V8_TARGET_ARCH_MIPS64