#include "src/assembler-inl.h"
#include "src/code-factory.h"
#include "src/code-stubs.h"
#include "src/counters.h"
#include "src/debug/debug.h"
#include "src/deoptimizer.h"
#include "src/frame-constants.h"
#include "src/frames.h"
#include "src/objects/js-generator.h"
#include "src/objects/smi.h"
#include "src/register-configuration.h"
#include "src/runtime/runtime.h"
#include "src/wasm/wasm-objects.h"

namespace v8 {
namespace internal {

#define __ ACCESS_MASM(masm)

void Builtins::Generate_Adaptor(MacroAssembler* masm, Address address,
                                ExitFrameType exit_frame_type) {
  __ Move(kJavaScriptCallExtraArg1Register, ExternalReference::Create(address));
  if (exit_frame_type == BUILTIN_EXIT) {
    __ Jump(BUILTIN_CODE(masm->isolate(), AdaptorWithBuiltinExitFrame),
            RelocInfo::CODE_TARGET);
  } else {
    DCHECK(exit_frame_type == EXIT);
    __ Jump(BUILTIN_CODE(masm->isolate(), AdaptorWithExitFrame),
            RelocInfo::CODE_TARGET);
  }
}
void Builtins::Generate_InternalArrayConstructor(MacroAssembler* masm) {
  Label generic_array_code, one_or_more_arguments, two_or_more_arguments;

  if (FLAG_debug_code) {
    // Initial map for the builtin InternalArray function should be a map.
    __ LoadP(r5, FieldMemOperand(r4, JSFunction::kPrototypeOrInitialMapOffset));
    __ TestIfSmi(r5, r0);
    __ Assert(ne, AbortReason::kUnexpectedInitialMapForInternalArrayFunction,
              cr0);
    __ CompareObjectType(r5, r6, r7, MAP_TYPE);
    __ Assert(eq, AbortReason::kUnexpectedInitialMapForInternalArrayFunction);
  }

  // Run the native code for the InternalArray function called as a normal
  // function.
  __ Jump(BUILTIN_CODE(masm->isolate(), InternalArrayConstructorImpl),
          RelocInfo::CODE_TARGET);
}
static void GenerateTailCallToReturnedCode(MacroAssembler* masm,
                                           Runtime::FunctionId function_id) {
  // ----------- S t a t e -------------
  //  -- r3 : argument count (preserved for callee)
  //  -- r4 : target function (preserved for callee)
  //  -- r6 : new target (preserved for callee)
  // -----------------------------------
  {
    FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
    // Push a copy of the target function and the new target, as well as the
    // target function itself as a parameter to the runtime call.
    __ SmiTag(r3);
    __ Push(r3, r4, r6, r4);

    __ CallRuntime(function_id, 1);
    __ mr(r5, r3);

    // Restore target function and new target.
    __ Pop(r3, r4, r6);
    __ SmiUntag(r3);
  }
  static_assert(kJavaScriptCallCodeStartRegister == r5, "ABI mismatch");
  __ addi(r5, r5, Operand(Code::kHeaderSize - kHeapObjectTag));
  __ JumpToJSEntry(r5);
}
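// The entry computation above relies on V8's code-pointer convention: a Code
// object pointer is tagged (kHeapObjectTag) and its instruction stream starts
// Code::kHeaderSize bytes into the object, so the raw entry point is
//   entry = code + Code::kHeaderSize - kHeapObjectTag.
// The same computation precedes every JumpToJSEntry/CallJSEntry below.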
namespace {

void Generate_JSBuiltinsConstructStubHelper(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r3     : number of arguments
  //  -- r4     : constructor function
  //  -- r6     : new target
  //  -- cp     : context
  //  -- lr     : return address
  //  -- sp[...]: constructor arguments
  // -----------------------------------
  Label post_instantiation_deopt_entry;

  // Enter a construct frame.
  {
    FrameAndConstantPoolScope scope(masm, StackFrame::CONSTRUCT);

    // Preserve the incoming parameters on the stack.
    __ SmiTag(r3);
    __ Push(cp, r3);
    __ SmiUntag(r3, SetRC);
    // The receiver for the builtin/api call.
    __ PushRoot(RootIndex::kTheHoleValue);
    // Set up pointer to last argument.
    __ addi(r7, fp, Operand(StandardFrameConstants::kCallerSPOffset));

    // Copy arguments and receiver to the expression stack.
    Label loop, no_args;
    // r3: untagged number of arguments (condition register set above)
    // r7: pointer to last argument
    __ beq(&no_args, cr0);
    __ ShiftLeftImm(ip, r3, Operand(kPointerSizeLog2));
    __ sub(sp, sp, ip);
    __ mtctr(r3);
    __ bind(&loop);
    __ subi(ip, ip, Operand(kPointerSize));
    __ LoadPX(r0, MemOperand(r7, ip));
    __ StorePX(r0, MemOperand(sp, ip));
    __ bdnz(&loop);
    __ bind(&no_args);

    // Call the function.
    // r3: number of arguments (untagged)
    // r4: constructor function
    // r6: new target
    {
      ConstantPoolUnavailableScope constant_pool_unavailable(masm);
      ParameterCount actual(r3);
      __ InvokeFunction(r4, r6, actual, CALL_FUNCTION);
    }

    // Restore context and smi-tagged arguments count from the frame.
    __ LoadP(cp, MemOperand(fp, ConstructFrameConstants::kContextOffset));
    __ LoadP(r4, MemOperand(fp, ConstructFrameConstants::kLengthOffset));

    // Leave construct frame.
  }
  // Remove caller arguments from the stack and return.
  STATIC_ASSERT(kSmiTagSize == 1 && kSmiTag == 0);
  __ SmiToPtrArrayOffset(r4, r4);
  __ add(sp, sp, r4);
  __ addi(sp, sp, Operand(kPointerSize));
  __ blr();
}
void Generate_StackOverflowCheck(MacroAssembler* masm, Register num_args,
                                 Register scratch, Label* stack_overflow) {
  // Check the stack for overflow. We are not trying to catch
  // interruptions (e.g. debug break and preemption) here, so the "real stack
  // limit" is checked.
  __ LoadRoot(scratch, RootIndex::kRealStackLimit);
  // Make scratch the space we have left. The stack might already be overflowed
  // here which will cause scratch to become negative.
  __ sub(scratch, sp, scratch);
  // Check if the arguments will overflow the stack.
  __ ShiftLeftImm(r0, num_args, Operand(kPointerSizeLog2));
  __ cmp(scratch, r0);
  __ ble(stack_overflow);  // Signed comparison.
}

}  // namespace
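// In effect the check above computes
//   space_left = sp - real_stack_limit
//   needed     = num_args << kPointerSizeLog2
// and branches to stack_overflow when needed >= space_left. The comparison is
// signed, so an already-overflowed stack (negative space_left) trips it too.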
// The construct stub for ES5 constructor functions and ES6 class constructors.
void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) {
  // Enter a construct frame.
  {
    FrameAndConstantPoolScope scope(masm, StackFrame::CONSTRUCT);
    Label post_instantiation_deopt_entry, not_create_implicit_receiver;

    // Preserve the incoming parameters on the stack.
    __ SmiTag(r3);
    __ Push(cp, r3, r4);
    __ PushRoot(RootIndex::kUndefinedValue);
    __ Push(r6);

    __ LoadP(r7, FieldMemOperand(r4, JSFunction::kSharedFunctionInfoOffset));
    __ lwz(r7, FieldMemOperand(r7, SharedFunctionInfo::kFlagsOffset));
    __ TestBitMask(r7, SharedFunctionInfo::IsDerivedConstructorBit::kMask, r0);
    __ bne(&not_create_implicit_receiver, cr0);

    // If not derived class constructor: Allocate the new receiver object.
    __ IncrementCounter(masm->isolate()->counters()->constructed_objects(), 1,
                        r7, r8);
    __ Call(BUILTIN_CODE(masm->isolate(), FastNewObject),
            RelocInfo::CODE_TARGET);
    __ b(&post_instantiation_deopt_entry);

    // Else: use TheHoleValue as receiver for the constructor call.
    __ bind(&not_create_implicit_receiver);
    __ LoadRoot(r3, RootIndex::kTheHoleValue);

    // Deoptimizer re-enters the stub here.
    masm->isolate()->heap()->SetConstructStubCreateDeoptPCOffset(
        masm->pc_offset());
    __ bind(&post_instantiation_deopt_entry);

    // Restore new target and push the allocated receiver twice: once as the
    // frame's receiver slot and once as the argument for the call (the callee
    // pops the latter).
    __ Pop(r6);
    __ Push(r3, r3);

    // Restore constructor function and argument count.
    __ LoadP(r4, MemOperand(fp, ConstructFrameConstants::kConstructorOffset));
    __ LoadP(r3, MemOperand(fp, ConstructFrameConstants::kLengthOffset));
    __ SmiUntag(r3);

    // Set up pointer to last argument.
    __ addi(r7, fp, Operand(StandardFrameConstants::kCallerSPOffset));

    Label enough_stack_space, stack_overflow;
    Generate_StackOverflowCheck(masm, r3, r8, &stack_overflow);
    __ b(&enough_stack_space);

    __ bind(&stack_overflow);
    // Restore the context from the frame.
    __ LoadP(cp, MemOperand(fp, ConstructFrameConstants::kContextOffset));
    __ CallRuntime(Runtime::kThrowStackOverflow);
    // Unreachable code.
    __ bkpt(0);

    __ bind(&enough_stack_space);

    // Copy arguments and receiver to the expression stack.
    Label loop, no_args;
    __ cmpi(r3, Operand::Zero());
    __ beq(&no_args);
    __ ShiftLeftImm(ip, r3, Operand(kPointerSizeLog2));
    __ sub(sp, sp, ip);
    __ mtctr(r3);
    __ bind(&loop);
    __ subi(ip, ip, Operand(kPointerSize));
    __ LoadPX(r0, MemOperand(r7, ip));
    __ StorePX(r0, MemOperand(sp, ip));
    __ bdnz(&loop);
    __ bind(&no_args);

    // Call the function.
    {
      ConstantPoolUnavailableScope constant_pool_unavailable(masm);
      ParameterCount actual(r3);
      __ InvokeFunction(r4, r6, actual, CALL_FUNCTION);
    }

    // Store offset of return address for the deoptimizer.
    masm->isolate()->heap()->SetConstructStubInvokeDeoptPCOffset(
        masm->pc_offset());

    // Restore the context from the frame.
    __ LoadP(cp, MemOperand(fp, ConstructFrameConstants::kContextOffset));

    // If the result is an object (in the ECMA sense), we should get rid
    // of the receiver and use the result; see ECMA-262 section 13.2.2-7.
    Label use_receiver, do_throw, leave_frame;

    // If the result is undefined, use the implicit receiver.
    __ JumpIfRoot(r3, RootIndex::kUndefinedValue, &use_receiver);

    // If the result is a smi, it is *not* an object in the ECMA sense.
    __ JumpIfSmi(r3, &use_receiver);

    // If the type of the result (stored in its map) is less than
    // FIRST_JS_RECEIVER_TYPE, it is not an object in the ECMA sense.
    STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
    __ CompareObjectType(r3, r7, r7, FIRST_JS_RECEIVER_TYPE);
    __ bge(&leave_frame);
    __ b(&use_receiver);

    __ bind(&do_throw);
    __ CallRuntime(Runtime::kThrowConstructorReturnedNonObject);

    // Throw away the result of the constructor invocation and use the
    // on-stack receiver as the result.
    __ bind(&use_receiver);
    __ LoadP(r3, MemOperand(sp));
    __ JumpIfRoot(r3, RootIndex::kTheHoleValue, &do_throw);

    __ bind(&leave_frame);
    // Restore smi-tagged arguments count from the frame.
    __ LoadP(r4, MemOperand(fp, ConstructFrameConstants::kLengthOffset));
    // Leave construct frame.
  }

  // Remove caller arguments from the stack and return.
  STATIC_ASSERT(kSmiTagSize == 1 && kSmiTag == 0);
  __ SmiToPtrArrayOffset(r4, r4);
  __ add(sp, sp, r4);
  __ addi(sp, sp, Operand(kPointerSize));
  __ blr();
}
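// Return-value rules implemented above: a constructor returning undefined (or
// any non-JSReceiver) yields the implicit receiver kept on the stack, an
// explicit object return wins, and a derived constructor whose receiver slot
// still holds the hole throws via Runtime::kThrowConstructorReturnedNonObject.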
void Builtins::Generate_JSBuiltinsConstructStub(MacroAssembler* masm) {
  Generate_JSBuiltinsConstructStubHelper(masm);
}

static void GetSharedFunctionInfoBytecode(MacroAssembler* masm,
                                          Register sfi_data,
                                          Register scratch1) {
  Label done;

  __ CompareObjectType(sfi_data, scratch1, scratch1, INTERPRETER_DATA_TYPE);
  __ bne(&done);
  __ LoadP(sfi_data,
           FieldMemOperand(sfi_data, InterpreterData::kBytecodeArrayOffset));

  __ bind(&done);
}
// static
void Builtins::Generate_ResumeGeneratorTrampoline(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r3 : the value to pass to the generator
  //  -- r4 : the JSGeneratorObject to resume
  //  -- lr : return address
  // -----------------------------------
  __ AssertGeneratorObject(r4);

  // Store input value into generator object.
  __ StoreP(r3, FieldMemOperand(r4, JSGeneratorObject::kInputOrDebugPosOffset),
            r0);
  __ RecordWriteField(r4, JSGeneratorObject::kInputOrDebugPosOffset, r3, r6,
                      kLRHasNotBeenSaved, kDontSaveFPRegs);

  // Load suspended function and context.
  __ LoadP(r7, FieldMemOperand(r4, JSGeneratorObject::kFunctionOffset));
  __ LoadP(cp, FieldMemOperand(r7, JSFunction::kContextOffset));

  // Flood function if we are stepping.
  Label prepare_step_in_if_stepping, prepare_step_in_suspended_generator;
  Label stepping_prepared;
  ExternalReference debug_hook =
      ExternalReference::debug_hook_on_function_call_address(masm->isolate());
  __ Move(ip, debug_hook);
  __ LoadByte(ip, MemOperand(ip), r0);
  __ extsb(ip, ip);
  __ CmpSmiLiteral(ip, Smi::zero(), r0);
  __ bne(&prepare_step_in_if_stepping);

  // Flood function if we need to continue stepping in the suspended generator.
  ExternalReference debug_suspended_generator =
      ExternalReference::debug_suspended_generator_address(masm->isolate());

  __ Move(ip, debug_suspended_generator);
  __ LoadP(ip, MemOperand(ip));
  __ cmp(ip, r4);
  __ beq(&prepare_step_in_suspended_generator);
  __ bind(&stepping_prepared);

  // Check the stack for overflow. We are not trying to catch interruptions
  // (i.e. debug break and preemption) here, so check the "real stack limit".
  Label stack_overflow;
  __ CompareRoot(sp, RootIndex::kRealStackLimit);
  __ blt(&stack_overflow);

  // Push receiver.
  __ LoadP(ip, FieldMemOperand(r4, JSGeneratorObject::kReceiverOffset));
  __ Push(ip);

  // Copy the function arguments from the generator object's register file.
  __ LoadP(r6, FieldMemOperand(r7, JSFunction::kSharedFunctionInfoOffset));
  __ LoadHalfWord(
      r6, FieldMemOperand(r6, SharedFunctionInfo::kFormalParameterCountOffset));
  __ LoadP(r5, FieldMemOperand(
                   r4, JSGeneratorObject::kParametersAndRegistersOffset));
  {
    Label loop, done_loop;
    __ cmpi(r6, Operand::Zero());
    __ ble(&done_loop);

    // Set r9 to the first element's address minus kPointerSize.
    __ addi(r9, r5,
            Operand(FixedArray::kHeaderSize - kHeapObjectTag - kPointerSize));

    __ mtctr(r6);
    __ bind(&loop);
    __ LoadPU(ip, MemOperand(r9, kPointerSize));
    __ push(ip);
    __ bdnz(&loop);

    __ bind(&done_loop);
  }

  // Underlying function needs to have bytecode available.
  if (FLAG_debug_code) {
    __ LoadP(r6, FieldMemOperand(r7, JSFunction::kSharedFunctionInfoOffset));
    __ LoadP(r6, FieldMemOperand(r6, SharedFunctionInfo::kFunctionDataOffset));
    GetSharedFunctionInfoBytecode(masm, r6, r3);
    __ CompareObjectType(r6, r6, r6, BYTECODE_ARRAY_TYPE);
    __ Assert(eq, AbortReason::kMissingBytecodeArray);
  }

  // Resume (Ignition/TurboFan) generator object.
  {
    __ LoadP(r3, FieldMemOperand(r7, JSFunction::kSharedFunctionInfoOffset));
    __ LoadHalfWord(
        r3, FieldMemOperand(r3, SharedFunctionInfo::kFormalParameterCountOffset));
    // We abuse new.target both to indicate that this is a resume call and to
    // pass in the generator object. In ordinary calls, new.target is always
    // undefined because generator functions are non-constructable.
    __ mr(r6, r4);
    __ mr(r4, r7);
    static_assert(kJavaScriptCallCodeStartRegister == r5, "ABI mismatch");
    __ LoadP(r5, FieldMemOperand(r4, JSFunction::kCodeOffset));
    __ addi(r5, r5, Operand(Code::kHeaderSize - kHeapObjectTag));
    __ JumpToJSEntry(r5);
  }

  __ bind(&prepare_step_in_if_stepping);
  {
    FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
    __ Push(r4, r7);
    // Push hole as receiver since we do not use it for stepping.
    __ PushRoot(RootIndex::kTheHoleValue);
    __ CallRuntime(Runtime::kDebugOnFunctionCall);
    __ Pop(r4);
    __ LoadP(r7, FieldMemOperand(r4, JSGeneratorObject::kFunctionOffset));
  }
  __ b(&stepping_prepared);

  __ bind(&prepare_step_in_suspended_generator);
  {
    FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
    __ Push(r4);
    __ CallRuntime(Runtime::kDebugPrepareStepInSuspendedGenerator);
    __ Pop(r4);
    __ LoadP(r7, FieldMemOperand(r4, JSGeneratorObject::kFunctionOffset));
  }
  __ b(&stepping_prepared);

  __ bind(&stack_overflow);
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ CallRuntime(Runtime::kThrowStackOverflow);
    __ bkpt(0);  // This should be unreachable.
  }
}
void Builtins::Generate_ConstructedNonConstructable(MacroAssembler* masm) {
  FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
  __ push(r4);
  __ CallRuntime(Runtime::kThrowConstructedNonConstructable);
}

// Clobbers r5; preserves all other registers.
static void Generate_CheckStackOverflow(MacroAssembler* masm, Register argc) {
  // Check the stack for overflow. We are not trying to catch
  // interruptions (e.g. debug break and preemption) here, so the "real stack
  // limit" is checked.
  Label okay;
  __ LoadRoot(r5, RootIndex::kRealStackLimit);
  // Make r5 the space we have left. The stack might already be overflowed
  // here which will cause r5 to become negative.
  __ sub(r5, sp, r5);
  // Check if the arguments will overflow the stack.
  __ ShiftLeftImm(r0, argc, Operand(kPointerSizeLog2));
  __ cmp(r5, r0);
  __ bgt(&okay);  // Signed comparison.

  // Out of stack space.
  __ CallRuntime(Runtime::kThrowStackOverflow);

  __ bind(&okay);
}
static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm,
                                             bool is_construct) {
  // Called from Generate_JS_Entry
  // r3: new.target
  // r4: function
  // r5: receiver
  // r6: argc
  // r7: argv
  {
    FrameScope scope(masm, StackFrame::INTERNAL);

    // Setup the context (we need to use the caller context from the isolate).
    ExternalReference context_address = ExternalReference::Create(
        IsolateAddressId::kContextAddress, masm->isolate());
    __ Move(cp, context_address);
    __ LoadP(cp, MemOperand(cp));

    // Push the function and the receiver onto the stack.
    __ Push(r4, r5);

    // Check if we have enough stack space to push all arguments.
    // Clobbers r5.
    Generate_CheckStackOverflow(masm, r6);

    // Copy arguments to the stack in a loop.
    // r4: function
    // r6: argc
    // r7: argv, i.e. points to first arg
    Label loop, entry;
    __ ShiftLeftImm(r0, r6, Operand(kPointerSizeLog2));
    __ add(r5, r7, r0);  // r5 points past the last argument.
    __ b(&entry);
    __ bind(&loop);
    __ LoadP(r8, MemOperand(r7));  // read next parameter
    __ addi(r7, r7, Operand(kPointerSize));
    __ LoadP(r0, MemOperand(r8));  // dereference handle
    __ push(r0);                   // push parameter
    __ bind(&entry);
    __ cmp(r7, r5);
    __ bne(&loop);

    // Setup new.target and argc.
    __ mr(r7, r3);
    __ mr(r3, r6);
    __ mr(r6, r7);

    // Initialize all JavaScript callee-saved registers, since they will be
    // seen by the garbage collector as part of handlers.
    __ LoadRoot(r7, RootIndex::kUndefinedValue);
    __ mr(r8, r7);
    __ mr(r14, r7);
    __ mr(r15, r7);
    __ mr(r16, r7);
    __ mr(r17, r7);

    // Invoke the code.
    Handle<Code> builtin = is_construct
                               ? BUILTIN_CODE(masm->isolate(), Construct)
                               : masm->isolate()->builtins()->Call();
    __ Call(builtin, RelocInfo::CODE_TARGET);

    // Exit the JS frame and remove the parameters (except function), and
    // return.
  }
  __ blr();
}

void Builtins::Generate_JSEntryTrampoline(MacroAssembler* masm) {
  Generate_JSEntryTrampolineHelper(masm, false);
}

void Builtins::Generate_JSConstructEntryTrampoline(MacroAssembler* masm) {
  Generate_JSEntryTrampolineHelper(masm, true);
}
static void ReplaceClosureCodeWithOptimizedCode(
    MacroAssembler* masm, Register optimized_code, Register closure,
    Register scratch1, Register scratch2, Register scratch3) {
  // Store code entry in the closure.
  __ StoreP(optimized_code, FieldMemOperand(closure, JSFunction::kCodeOffset),
            r0);
  __ mr(scratch1, optimized_code);  // Write barrier clobbers scratch1 below.
  __ RecordWriteField(closure, JSFunction::kCodeOffset, scratch1, scratch2,
                      kLRHasNotBeenSaved, kDontSaveFPRegs, OMIT_REMEMBERED_SET,
                      OMIT_SMI_CHECK);
}
static void LeaveInterpreterFrame(MacroAssembler* masm, Register scratch) {
  Register args_count = scratch;

  // Get the arguments + receiver count.
  __ LoadP(args_count,
           MemOperand(fp, InterpreterFrameConstants::kBytecodeArrayFromFp));
  __ lwz(args_count,
         FieldMemOperand(args_count, BytecodeArray::kParameterSizeOffset));

  // Leave the frame (also dropping the register file).
  __ LeaveFrame(StackFrame::INTERPRETED);

  __ add(sp, sp, args_count);
}
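// LeaveInterpreterFrame tears down two things at once: LeaveFrame drops the
// interpreter register file and the frame itself, and the final add pops the
// caller's arguments. The argument byte count comes from the BytecodeArray's
// parameter_size field, which already includes the receiver slot.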
static void TailCallRuntimeIfMarkerEquals(MacroAssembler* masm,
                                          Register smi_entry,
                                          OptimizationMarker marker,
                                          Runtime::FunctionId function_id) {
  Label no_match;
  __ CmpSmiLiteral(smi_entry, Smi::FromEnum(marker), r0);
  __ bne(&no_match);
  GenerateTailCallToReturnedCode(masm, function_id);
  __ bind(&no_match);
}

static void MaybeTailCallOptimizedCodeSlot(MacroAssembler* masm,
                                           Register feedback_vector,
                                           Register scratch1, Register scratch2,
                                           Register scratch3) {
  // ----------- S t a t e -------------
  //  -- r3 : argument count (preserved for callee if needed, and caller)
  //  -- r6 : new target (preserved for callee if needed, and caller)
  //  -- r4 : target function (preserved for callee if needed, and caller)
  //  -- feedback vector (preserved for caller if needed)
  // -----------------------------------
  DCHECK(
      !AreAliased(feedback_vector, r3, r4, r6, scratch1, scratch2, scratch3));

  Label optimized_code_slot_is_weak_ref, fallthrough;

  Register closure = r4;
  Register optimized_code_entry = scratch1;

  __ LoadP(
      optimized_code_entry,
      FieldMemOperand(feedback_vector, FeedbackVector::kOptimizedCodeOffset));

  // Check if the code entry is a Smi. If yes, we interpret it as an
  // optimization marker. Otherwise, interpret it as a weak reference to a
  // code object.
  __ JumpIfNotSmi(optimized_code_entry, &optimized_code_slot_is_weak_ref);

  {
    // Optimized code slot is a Smi optimization marker.

    // Fall through if no optimization trigger.
    __ CmpSmiLiteral(optimized_code_entry,
                     Smi::FromEnum(OptimizationMarker::kNone), r0);
    __ beq(&fallthrough);

    TailCallRuntimeIfMarkerEquals(masm, optimized_code_entry,
                                  OptimizationMarker::kLogFirstExecution,
                                  Runtime::kFunctionFirstExecution);
    TailCallRuntimeIfMarkerEquals(masm, optimized_code_entry,
                                  OptimizationMarker::kCompileOptimized,
                                  Runtime::kCompileOptimized_NotConcurrent);
    TailCallRuntimeIfMarkerEquals(
        masm, optimized_code_entry,
        OptimizationMarker::kCompileOptimizedConcurrent,
        Runtime::kCompileOptimized_Concurrent);

    {
      // Otherwise, the marker is InOptimizationQueue, so fall through hoping
      // that an interrupt will eventually update the slot with optimized code.
      if (FLAG_debug_code) {
        __ CmpSmiLiteral(
            optimized_code_entry,
            Smi::FromEnum(OptimizationMarker::kInOptimizationQueue), r0);
        __ Assert(eq, AbortReason::kExpectedOptimizationSentinel);
      }
      __ b(&fallthrough);
    }
  }

  {
    // Optimized code slot is a weak reference.
    __ bind(&optimized_code_slot_is_weak_ref);

    __ LoadWeakValue(optimized_code_entry, optimized_code_entry, &fallthrough);

    // Check if the optimized code is marked for deopt. If it is, call the
    // runtime to clear it.
    Label found_deoptimized_code;
    __ LoadP(scratch2, FieldMemOperand(optimized_code_entry,
                                       Code::kCodeDataContainerOffset));
    __ lwz(
        scratch2,
        FieldMemOperand(scratch2, CodeDataContainer::kKindSpecificFlagsOffset));
    __ TestBit(scratch2, Code::kMarkedForDeoptimizationBit, r0);
    __ bne(&found_deoptimized_code, cr0);

    // Optimized code is good, get it into the closure and link the closure
    // into the optimized functions list, then tail call the optimized code.
    ReplaceClosureCodeWithOptimizedCode(masm, optimized_code_entry, closure,
                                        scratch2, scratch3, feedback_vector);
    static_assert(kJavaScriptCallCodeStartRegister == r5, "ABI mismatch");
    __ addi(r5, optimized_code_entry,
            Operand(Code::kHeaderSize - kHeapObjectTag));
    __ Jump(r5);

    // Optimized code slot contains deoptimized code, evict it and re-enter
    // the closure's code.
    __ bind(&found_deoptimized_code);
    GenerateTailCallToReturnedCode(masm, Runtime::kEvictOptimizedCodeSlot);
  }

  // Fall-through if the optimized code cell is clear and there is no
  // optimization marker.
  __ bind(&fallthrough);
}
// Advance the current bytecode offset. This simulates what all bytecode
// handlers do upon completion of the underlying operation. Will bail out to a
// given label if the bytecode (without prefix) is a return bytecode.
static void AdvanceBytecodeOffsetOrReturn(MacroAssembler* masm,
                                          Register bytecode_array,
                                          Register bytecode_offset,
                                          Register bytecode, Register scratch1,
                                          Label* if_return) {
  Register bytecode_size_table = scratch1;
  Register scratch2 = bytecode;
  DCHECK(!AreAliased(bytecode_array, bytecode_offset, bytecode_size_table,
                     bytecode));
  __ Move(bytecode_size_table,
          ExternalReference::bytecode_size_table_address());

  // Check if the bytecode is a Wide or ExtraWide prefix bytecode.
  Label process_bytecode, extra_wide;
  STATIC_ASSERT(0 == static_cast<int>(interpreter::Bytecode::kWide));
  STATIC_ASSERT(1 == static_cast<int>(interpreter::Bytecode::kExtraWide));
  STATIC_ASSERT(2 == static_cast<int>(interpreter::Bytecode::kDebugBreakWide));
  STATIC_ASSERT(3 ==
                static_cast<int>(interpreter::Bytecode::kDebugBreakExtraWide));
  __ cmpi(bytecode, Operand(0x3));
  __ bgt(&process_bytecode);
  __ andi(r0, bytecode, Operand(0x1));
  __ bne(&extra_wide, cr0);

  // Load the next bytecode and update table to the wide scaled table.
  __ addi(bytecode_offset, bytecode_offset, Operand(1));
  __ lbzx(bytecode, MemOperand(bytecode_array, bytecode_offset));
  __ addi(bytecode_size_table, bytecode_size_table,
          Operand(kIntSize * interpreter::Bytecodes::kBytecodeCount));
  __ b(&process_bytecode);

  __ bind(&extra_wide);
  // Load the next bytecode and update table to the extra wide scaled table.
  __ addi(bytecode_offset, bytecode_offset, Operand(1));
  __ lbzx(bytecode, MemOperand(bytecode_array, bytecode_offset));
  __ addi(bytecode_size_table, bytecode_size_table,
          Operand(2 * kIntSize * interpreter::Bytecodes::kBytecodeCount));

  __ bind(&process_bytecode);

  // Bailout to the return label if this is a return bytecode.
#define JUMP_IF_EQUAL(NAME)                                           \
  __ cmpi(bytecode,                                                   \
          Operand(static_cast<int>(interpreter::Bytecode::k##NAME))); \
  __ beq(if_return);
  RETURN_BYTECODE_LIST(JUMP_IF_EQUAL)
#undef JUMP_IF_EQUAL

  // Otherwise, load the size of the current bytecode and advance the offset.
  __ ShiftLeftImm(scratch2, bytecode, Operand(2));
  __ lwzx(scratch2, MemOperand(bytecode_size_table, scratch2));
  __ add(bytecode_offset, bytecode_offset, scratch2);
}
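// The bytecode size table referenced above is laid out as three consecutive
// arrays of Bytecodes::kBytecodeCount int32 entries: operand sizes for the
// single, Wide, and ExtraWide encodings. Consuming a prefix bytecode is thus
// just "advance the table pointer by one (or two) table lengths", after which
// the ordinary size lookup applies.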
// Generate code for entering a JS function with the interpreter.
// On entry to the function the receiver and arguments have been pushed on the
// stack left to right. The live registers are:
//   o r4: the JS function object being called.
//   o r6: the incoming new target or generator object
//   o cp: our context
//   o fp: the caller's frame pointer
//   o sp: stack pointer
//   o lr: return address
void Builtins::Generate_InterpreterEntryTrampoline(MacroAssembler* masm) {
  Register closure = r4;
  Register feedback_vector = r5;

  // Load the feedback vector from the closure.
  __ LoadP(feedback_vector,
           FieldMemOperand(closure, JSFunction::kFeedbackCellOffset));
  __ LoadP(feedback_vector,
           FieldMemOperand(feedback_vector, Cell::kValueOffset));
  // Read off the optimized code slot in the feedback vector, and if there
  // is optimized code or an optimization marker, call that instead.
  MaybeTailCallOptimizedCodeSlot(masm, feedback_vector, r7, r9, r8);

  // Open a frame scope to indicate that there is a frame on the stack. The
  // MANUAL indicates that the scope shouldn't actually generate code to set up
  // the frame (that is done below).
  FrameScope frame_scope(masm, StackFrame::MANUAL);
  __ PushStandardFrame(closure);

  // Get the bytecode array from the function object and load it into
  // kInterpreterBytecodeArrayRegister.
  __ LoadP(r3, FieldMemOperand(closure, JSFunction::kSharedFunctionInfoOffset));
  // Load original bytecode array or the debug copy.
  __ LoadP(kInterpreterBytecodeArrayRegister,
           FieldMemOperand(r3, SharedFunctionInfo::kFunctionDataOffset));
  GetSharedFunctionInfoBytecode(masm, kInterpreterBytecodeArrayRegister, r7);

  // Increment invocation count for the function.
  __ LoadWord(
      r8,
      FieldMemOperand(feedback_vector, FeedbackVector::kInvocationCountOffset),
      r0);
  __ addi(r8, r8, Operand(1));
  __ StoreWord(
      r8,
      FieldMemOperand(feedback_vector, FeedbackVector::kInvocationCountOffset),
      r0);

  // Check function data field is actually a BytecodeArray object.
  if (FLAG_debug_code) {
    __ TestIfSmi(kInterpreterBytecodeArrayRegister, r0);
    __ Assert(ne,
              AbortReason::kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry,
              cr0);
    __ CompareObjectType(kInterpreterBytecodeArrayRegister, r3, no_reg,
                         BYTECODE_ARRAY_TYPE);
    __ Assert(
        eq, AbortReason::kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry);
  }

  // Reset code age.
  __ mov(r8, Operand(BytecodeArray::kNoAgeBytecodeAge));
  __ StoreByte(r8, FieldMemOperand(kInterpreterBytecodeArrayRegister,
                                   BytecodeArray::kBytecodeAgeOffset),
               r0);

  // Load the initial bytecode offset.
  __ mov(kInterpreterBytecodeOffsetRegister,
         Operand(BytecodeArray::kHeaderSize - kHeapObjectTag));

  // Push bytecode array and Smi tagged bytecode array offset.
  __ SmiTag(r3, kInterpreterBytecodeOffsetRegister);
  __ Push(kInterpreterBytecodeArrayRegister, r3);

  // Allocate the local and temporary register file on the stack.
  {
    // Load frame size from the BytecodeArray object.
    __ lwz(r5, FieldMemOperand(kInterpreterBytecodeArrayRegister,
                               BytecodeArray::kFrameSizeOffset));

    // Do a stack check to ensure we don't go over the limit.
    Label ok;
    __ sub(r8, sp, r5);
    __ LoadRoot(r0, RootIndex::kRealStackLimit);
    __ cmpl(r8, r0);
    __ bge(&ok);
    __ CallRuntime(Runtime::kThrowStackOverflow);
    __ bind(&ok);

    // If ok, push undefined as the initial value for all register file
    // entries.
    Label loop, no_args;
    __ LoadRoot(r8, RootIndex::kUndefinedValue);
    __ ShiftRightImm(r5, r5, Operand(kPointerSizeLog2), SetRC);
    __ beq(&no_args, cr0);
    __ mtctr(r5);
    __ bind(&loop);
    __ push(r8);
    __ bdnz(&loop);
    __ bind(&no_args);
  }

  // If the bytecode array has a valid incoming new target or generator object
  // register, initialize it with the incoming value which was passed in r6.
  Label no_incoming_new_target_or_generator_register;
  __ LoadWordArith(
      r8, FieldMemOperand(
              kInterpreterBytecodeArrayRegister,
              BytecodeArray::kIncomingNewTargetOrGeneratorRegisterOffset));
  __ cmpi(r8, Operand::Zero());
  __ beq(&no_incoming_new_target_or_generator_register);
  __ ShiftLeftImm(r8, r8, Operand(kPointerSizeLog2));
  __ StorePX(r6, MemOperand(fp, r8));
  __ bind(&no_incoming_new_target_or_generator_register);

  // Load accumulator with undefined.
  __ LoadRoot(kInterpreterAccumulatorRegister, RootIndex::kUndefinedValue);
  // Load the dispatch table into a register and dispatch to the bytecode
  // handler at the current bytecode offset.
  Label do_dispatch;
  __ bind(&do_dispatch);
  __ Move(
      kInterpreterDispatchTableRegister,
      ExternalReference::interpreter_dispatch_table_address(masm->isolate()));
  __ lbzx(r6, MemOperand(kInterpreterBytecodeArrayRegister,
                         kInterpreterBytecodeOffsetRegister));
  __ ShiftLeftImm(r6, r6, Operand(kPointerSizeLog2));
  __ LoadPX(kJavaScriptCallCodeStartRegister,
            MemOperand(kInterpreterDispatchTableRegister, r6));
  __ Call(kJavaScriptCallCodeStartRegister);

  masm->isolate()->heap()->SetInterpreterEntryReturnPCOffset(masm->pc_offset());

  // Any returns to the entry trampoline are either due to the return bytecode
  // or the interpreter tail calling a builtin and then a dispatch.

  // Get bytecode array and bytecode offset from the stack frame.
  __ LoadP(kInterpreterBytecodeArrayRegister,
           MemOperand(fp, InterpreterFrameConstants::kBytecodeArrayFromFp));
  __ LoadP(kInterpreterBytecodeOffsetRegister,
           MemOperand(fp, InterpreterFrameConstants::kBytecodeOffsetFromFp));
  __ SmiUntag(kInterpreterBytecodeOffsetRegister);

  // Either return, or advance to the next bytecode and dispatch.
  Label do_return;
  __ lbzx(r4, MemOperand(kInterpreterBytecodeArrayRegister,
                         kInterpreterBytecodeOffsetRegister));
  AdvanceBytecodeOffsetOrReturn(masm, kInterpreterBytecodeArrayRegister,
                                kInterpreterBytecodeOffsetRegister, r4, r5,
                                &do_return);
  __ b(&do_dispatch);

  __ bind(&do_return);
  // The return value is in r3.
  LeaveInterpreterFrame(masm, r5);
  __ blr();
}
static void Generate_InterpreterPushArgs(MacroAssembler* masm,
                                         Register num_args, Register index,
                                         Register count, Register scratch) {
  Label loop, skip;
  __ cmpi(count, Operand::Zero());
  __ beq(&skip);
  __ addi(index, index, Operand(kPointerSize));  // Bias up for LoadPU.
  __ mtctr(count);
  __ bind(&loop);
  __ LoadPU(scratch, MemOperand(index, -kPointerSize));
  __ push(scratch);
  __ bdnz(&loop);
  __ bind(&skip);
}
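// The copy loop above walks the argument area downwards with a
// load-with-update (LoadPU), so the hot path is two instructions per
// argument; the trip count lives in CTR via mtctr/bdnz, the usual PPC
// counted-loop idiom used throughout this file.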
// static
void Builtins::Generate_InterpreterPushArgsThenCallImpl(
    MacroAssembler* masm, ConvertReceiverMode receiver_mode,
    InterpreterPushArgsMode mode) {
  DCHECK(mode != InterpreterPushArgsMode::kArrayFunction);
  // ----------- S t a t e -------------
  //  -- r3 : the number of arguments (not including the receiver)
  //  -- r5 : the address of the first argument to be pushed. Subsequent
  //          arguments should be consecutive above this, in the same order
  //          as they are to be pushed onto the stack.
  //  -- r4 : the target to call (can be any Object).
  // -----------------------------------
  Label stack_overflow;

  // Calculate number of arguments (add one for receiver).
  __ addi(r6, r3, Operand(1));

  Generate_StackOverflowCheck(masm, r6, ip, &stack_overflow);

  // Push "undefined" as the receiver arg if we need to.
  if (receiver_mode == ConvertReceiverMode::kNullOrUndefined) {
    __ PushRoot(RootIndex::kUndefinedValue);
    __ mr(r6, r3);  // Argument count is correct.
  }

  // Push the arguments. r5, r6, r7 will be modified.
  Generate_InterpreterPushArgs(masm, r6, r5, r6, r7);

  if (mode == InterpreterPushArgsMode::kWithFinalSpread) {
    __ Pop(r5);                   // Pass the spread in a register
    __ subi(r3, r3, Operand(1));  // Subtract one for spread
  }

  // Call the target.
  if (mode == InterpreterPushArgsMode::kWithFinalSpread) {
    __ Jump(BUILTIN_CODE(masm->isolate(), CallWithSpread),
            RelocInfo::CODE_TARGET);
  } else {
    __ Jump(masm->isolate()->builtins()->Call(ConvertReceiverMode::kAny),
            RelocInfo::CODE_TARGET);
  }

  __ bind(&stack_overflow);
  {
    __ TailCallRuntime(Runtime::kThrowStackOverflow);
    // Unreachable code.
    __ bkpt(0);
  }
}
// static
void Builtins::Generate_InterpreterPushArgsThenConstructImpl(
    MacroAssembler* masm, InterpreterPushArgsMode mode) {
  // ----------- S t a t e -------------
  //  -- r3 : argument count (not including receiver)
  //  -- r6 : new target
  //  -- r4 : constructor to call
  //  -- r5 : allocation site feedback if available, undefined otherwise.
  //  -- r7 : address of the first argument
  // -----------------------------------
  Label stack_overflow;

  // Push a slot for the receiver to be constructed.
  __ li(r0, Operand::Zero());
  __ push(r0);

  // Push the arguments (skip if none).
  Label skip;
  __ cmpi(r3, Operand::Zero());
  __ beq(&skip);
  Generate_StackOverflowCheck(masm, r3, ip, &stack_overflow);
  Generate_InterpreterPushArgs(masm, r3, r7, r3, r9);
  __ bind(&skip);

  if (mode == InterpreterPushArgsMode::kWithFinalSpread) {
    __ Pop(r5);                   // Pass the spread in a register
    __ subi(r3, r3, Operand(1));  // Subtract one for spread
  } else {
    __ AssertUndefinedOrAllocationSite(r5, r8);
  }

  if (mode == InterpreterPushArgsMode::kArrayFunction) {
    __ AssertFunction(r4);

    // Tail call to the array construct stub (still in the caller context at
    // this point).
    Handle<Code> code = BUILTIN_CODE(masm->isolate(), ArrayConstructorImpl);
    __ Jump(code, RelocInfo::CODE_TARGET);
  } else if (mode == InterpreterPushArgsMode::kWithFinalSpread) {
    // Call the constructor with r3, r4, and r6 unmodified.
    __ Jump(BUILTIN_CODE(masm->isolate(), ConstructWithSpread),
            RelocInfo::CODE_TARGET);
  } else {
    DCHECK_EQ(InterpreterPushArgsMode::kOther, mode);
    // Call the constructor with r3, r4, and r6 unmodified.
    __ Jump(BUILTIN_CODE(masm->isolate(), Construct), RelocInfo::CODE_TARGET);
  }

  __ bind(&stack_overflow);
  {
    __ TailCallRuntime(Runtime::kThrowStackOverflow);
    // Unreachable code.
    __ bkpt(0);
  }
}
static void Generate_InterpreterEnterBytecode(MacroAssembler* masm) {
  // Set the return address to the correct point in the interpreter entry
  // trampoline.
  Label builtin_trampoline, trampoline_loaded;
  Smi interpreter_entry_return_pc_offset(
      masm->isolate()->heap()->interpreter_entry_return_pc_offset());
  DCHECK_NE(interpreter_entry_return_pc_offset, Smi::zero());

  // If the SFI function_data is an InterpreterData, the function will have a
  // custom copy of the interpreter entry trampoline for profiling. If so,
  // get the custom trampoline, otherwise grab the entry address of the global
  // trampoline.
  __ LoadP(r5, MemOperand(fp, StandardFrameConstants::kFunctionOffset));
  __ LoadP(r5, FieldMemOperand(r5, JSFunction::kSharedFunctionInfoOffset));
  __ LoadP(r5, FieldMemOperand(r5, SharedFunctionInfo::kFunctionDataOffset));
  __ CompareObjectType(r5, kInterpreterDispatchTableRegister,
                       kInterpreterDispatchTableRegister,
                       INTERPRETER_DATA_TYPE);
  __ bne(&builtin_trampoline);

  __ LoadP(r5,
           FieldMemOperand(r5, InterpreterData::kInterpreterTrampolineOffset));
  __ addi(r5, r5, Operand(Code::kHeaderSize - kHeapObjectTag));
  __ b(&trampoline_loaded);

  __ bind(&builtin_trampoline);
  __ Move(r5, ExternalReference::
                  address_of_interpreter_entry_trampoline_instruction_start(
                      masm->isolate()));
  __ LoadP(r5, MemOperand(r5));

  __ bind(&trampoline_loaded);
  __ addi(r0, r5, Operand(interpreter_entry_return_pc_offset->value()));
  __ mtlr(r0);

  // Initialize the dispatch table register.
  __ Move(
      kInterpreterDispatchTableRegister,
      ExternalReference::interpreter_dispatch_table_address(masm->isolate()));

  // Get the bytecode array pointer from the frame.
  __ LoadP(kInterpreterBytecodeArrayRegister,
           MemOperand(fp, InterpreterFrameConstants::kBytecodeArrayFromFp));

  if (FLAG_debug_code) {
    // Check function data field is actually a BytecodeArray object.
    __ TestIfSmi(kInterpreterBytecodeArrayRegister, r0);
    __ Assert(ne,
              AbortReason::kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry,
              cr0);
    __ CompareObjectType(kInterpreterBytecodeArrayRegister, r4, no_reg,
                         BYTECODE_ARRAY_TYPE);
    __ Assert(
        eq, AbortReason::kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry);
  }

  // Get the target bytecode offset from the frame.
  __ LoadP(kInterpreterBytecodeOffsetRegister,
           MemOperand(fp, InterpreterFrameConstants::kBytecodeOffsetFromFp));
  __ SmiUntag(kInterpreterBytecodeOffsetRegister);

  // Dispatch to the target bytecode.
  __ lbzx(ip, MemOperand(kInterpreterBytecodeArrayRegister,
                         kInterpreterBytecodeOffsetRegister));
  __ ShiftLeftImm(ip, ip, Operand(kPointerSizeLog2));
  __ LoadPX(kJavaScriptCallCodeStartRegister,
            MemOperand(kInterpreterDispatchTableRegister, ip));
  __ Jump(kJavaScriptCallCodeStartRegister);
}
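// Note that Generate_InterpreterEnterBytecode never returns to its caller: it
// forges a return address into the interpreter entry trampoline (either the
// global one or a per-function profiling copy held in InterpreterData) and
// then jumps directly to the dispatch-table handler for the current bytecode,
// mirroring the dispatch loop in the entry trampoline itself.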
void Builtins::Generate_InterpreterEnterBytecodeAdvance(MacroAssembler* masm) {
  // Get bytecode array and bytecode offset from the stack frame.
  __ LoadP(kInterpreterBytecodeArrayRegister,
           MemOperand(fp, InterpreterFrameConstants::kBytecodeArrayFromFp));
  __ LoadP(kInterpreterBytecodeOffsetRegister,
           MemOperand(fp, InterpreterFrameConstants::kBytecodeOffsetFromFp));
  __ SmiUntag(kInterpreterBytecodeOffsetRegister);

  // Load the current bytecode.
  __ lbzx(r4, MemOperand(kInterpreterBytecodeArrayRegister,
                         kInterpreterBytecodeOffsetRegister));

  // Advance to the next bytecode.
  Label if_return;
  AdvanceBytecodeOffsetOrReturn(masm, kInterpreterBytecodeArrayRegister,
                                kInterpreterBytecodeOffsetRegister, r4, r5,
                                &if_return);

  // Convert the new bytecode offset to a Smi and save it in the stack frame.
  __ SmiTag(r5, kInterpreterBytecodeOffsetRegister);
  __ StoreP(r5,
            MemOperand(fp, InterpreterFrameConstants::kBytecodeOffsetFromFp));

  Generate_InterpreterEnterBytecode(masm);

  // We should never take the if_return path.
  __ bind(&if_return);
  __ Abort(AbortReason::kInvalidBytecodeAdvance);
}

void Builtins::Generate_InterpreterEnterBytecodeDispatch(MacroAssembler* masm) {
  Generate_InterpreterEnterBytecode(masm);
}
void Builtins::Generate_InstantiateAsmJs(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r3 : argument count (preserved for callee)
  //  -- r4 : new target (preserved for callee)
  //  -- r6 : target function (preserved for callee)
  // -----------------------------------
  Label failed;
  {
    FrameScope scope(masm, StackFrame::INTERNAL);

    // Preserve argument count for later compare.
    __ Move(r7, r3);
    // Push a copy of the target function and the new target.
    __ SmiTag(r3);
    // Push another copy as a parameter to the runtime call.
    __ Push(r3, r4, r6, r4);

    // Copy arguments from caller (stdlib, foreign, heap).
    Label args_done;
    for (int j = 0; j < 4; ++j) {
      Label over;
      if (j < 3) {
        __ cmpi(r7, Operand(j));
        __ bne(&over);
      }
      for (int i = j - 1; i >= 0; --i) {
        __ LoadP(r7, MemOperand(fp, StandardFrameConstants::kCallerSPOffset +
                                        i * kPointerSize));
        __ push(r7);
      }
      for (int i = 0; i < 3 - j; ++i) {
        __ PushRoot(RootIndex::kUndefinedValue);
      }
      if (j < 3) {
        __ jmp(&args_done);
        __ bind(&over);
      }
    }
    __ bind(&args_done);

    // Call runtime, on success unwind frame, and parent frame.
    __ CallRuntime(Runtime::kInstantiateAsmJs, 4);

    // A smi 0 is returned on failure, an object on success.
    __ JumpIfSmi(r3, &failed);

    __ Drop(2);
    __ pop(r7);
    __ SmiUntag(r7);
    scope.GenerateLeaveFrame();

    __ addi(r7, r7, Operand(1));
    __ Drop(r7);
    __ Ret();

    __ bind(&failed);
    // Restore target function and new target.
    __ Pop(r3, r4, r6);
    __ SmiUntag(r3);
  }
  // On failure, tail call back to regular JS by re-calling the function
  // which has been reset to the compile lazy builtin.
  static_assert(kJavaScriptCallCodeStartRegister == r5, "ABI mismatch");
  __ LoadP(r5, FieldMemOperand(r4, JSFunction::kCodeOffset));
  __ addi(r5, r5, Operand(Code::kHeaderSize - kHeapObjectTag));
  __ JumpToJSEntry(r5);
}
namespace {

void Generate_ContinueToBuiltinHelper(MacroAssembler* masm,
                                      bool java_script_builtin,
                                      bool with_result) {
  const RegisterConfiguration* config(RegisterConfiguration::Default());
  int allocatable_register_count = config->num_allocatable_general_registers();
  if (with_result) {
    // Overwrite the hole inserted by the deoptimizer with the return value
    // from the LAZY deopt point.
    __ StoreP(
        r3,
        MemOperand(
            sp, config->num_allocatable_general_registers() * kPointerSize +
                    BuiltinContinuationFrameConstants::kFixedFrameSize));
  }
  for (int i = allocatable_register_count - 1; i >= 0; --i) {
    int code = config->GetAllocatableGeneralCode(i);
    __ Pop(Register::from_code(code));
    if (java_script_builtin && code == kJavaScriptCallArgCountRegister.code()) {
      __ SmiUntag(Register::from_code(code));
    }
  }
  __ LoadP(
      fp,
      MemOperand(sp, BuiltinContinuationFrameConstants::kFixedFrameSizeFromFp));
  __ Pop(ip);
  __ addi(sp, sp,
          Operand(BuiltinContinuationFrameConstants::kFixedFrameSizeFromFp));
  __ Pop(r0);
  __ mtlr(r0);
  __ addi(ip, ip, Operand(Code::kHeaderSize - kHeapObjectTag));
  __ Jump(ip);
}

}  // namespace

void Builtins::Generate_ContinueToCodeStubBuiltin(MacroAssembler* masm) {
  Generate_ContinueToBuiltinHelper(masm, false, false);
}

void Builtins::Generate_ContinueToCodeStubBuiltinWithResult(
    MacroAssembler* masm) {
  Generate_ContinueToBuiltinHelper(masm, false, true);
}

void Builtins::Generate_ContinueToJavaScriptBuiltin(MacroAssembler* masm) {
  Generate_ContinueToBuiltinHelper(masm, true, false);
}

void Builtins::Generate_ContinueToJavaScriptBuiltinWithResult(
    MacroAssembler* masm) {
  Generate_ContinueToBuiltinHelper(masm, true, true);
}
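// The four wrappers above differ only in two flags: whether the continued
// builtin uses the JavaScript calling convention (in which case the
// argument-count register must be Smi-untagged while registers are restored)
// and whether a LAZY-deopt return value must be stored into the frame before
// the saved register state is popped.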
void Builtins::Generate_NotifyDeoptimized(MacroAssembler* masm) {
  {
    FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
    __ CallRuntime(Runtime::kNotifyDeoptimized);
  }

  DCHECK_EQ(kInterpreterAccumulatorRegister.code(), r3.code());
  __ LoadP(r3, MemOperand(sp, 0 * kPointerSize));
  __ addi(sp, sp, Operand(1 * kPointerSize));
  __ Ret();
}

void Builtins::Generate_InterpreterOnStackReplacement(MacroAssembler* masm) {
  // Lookup the function in the JavaScript frame.
  __ LoadP(r3, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
  __ LoadP(r3, MemOperand(r3, JavaScriptFrameConstants::kFunctionOffset));

  {
    FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
    // Pass function as argument.
    __ push(r3);
    __ CallRuntime(Runtime::kCompileForOnStackReplacement);
  }

  // If the code object is null, just return to the caller.
  Label skip;
  __ CmpSmiLiteral(r3, Smi::zero(), r0);
  __ bne(&skip);
  __ Ret();

  __ bind(&skip);

  // Drop the handler frame that is sitting on top of the actual
  // JavaScript frame. This is the case when OSR is triggered from bytecode.
  __ LeaveFrame(StackFrame::STUB);

  // Load deoptimization data from the code object.
  // <deopt_data> = <code>[#deoptimization_data_offset]
  __ LoadP(r4, FieldMemOperand(r3, Code::kDeoptimizationDataOffset));

  {
    ConstantPoolUnavailableScope constant_pool_unavailable(masm);
    __ addi(r3, r3, Operand(Code::kHeaderSize - kHeapObjectTag));  // Code start

    if (FLAG_enable_embedded_constant_pool) {
      __ LoadConstantPoolPointerRegisterFromCodeTargetAddress(r3);
    }

    // Load the OSR entrypoint offset from the deoptimization data.
    // <osr_offset> = <deopt_data>[#header_size + #osr_pc_offset]
    __ LoadP(r4,
             FieldMemOperand(r4, FixedArray::OffsetOfElementAt(
                                     DeoptimizationData::kOsrPcOffsetIndex)));
    __ SmiUntag(r4);

    // Compute the target address = code start + osr_offset.
    __ add(r0, r3, r4);

    // And "return" to the OSR entry point of the function.
    __ mtlr(r0);
    __ blr();
  }
}
// static
void Builtins::Generate_FunctionPrototypeApply(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r3    : argc
  //  -- sp[0] : argArray
  //  -- sp[4] : thisArg
  //  -- sp[8] : receiver
  // -----------------------------------

  // 1. Load receiver into r4, argArray into r5 (if present), remove all
  // arguments from the stack (including the receiver), and push thisArg (if
  // present) instead.
  {
    Label skip;
    Register arg_size = r8;
    Register new_sp = r6;
    Register scratch = r7;
    __ ShiftLeftImm(arg_size, r3, Operand(kPointerSizeLog2));
    __ add(new_sp, sp, arg_size);
    __ LoadRoot(scratch, RootIndex::kUndefinedValue);
    __ mr(r5, scratch);
    __ LoadP(r4, MemOperand(new_sp, 0));  // receiver
    __ cmpi(arg_size, Operand(kPointerSize));
    __ blt(&skip);
    __ LoadP(scratch, MemOperand(new_sp, 1 * -kPointerSize));  // thisArg
    __ beq(&skip);
    __ LoadP(r5, MemOperand(new_sp, 2 * -kPointerSize));  // argArray
    __ bind(&skip);
    __ mr(sp, new_sp);
    __ StoreP(scratch, MemOperand(sp, 0));
  }

  // ----------- S t a t e -------------
  //  -- r5    : argArray
  //  -- r4    : receiver
  //  -- sp[0] : thisArg
  // -----------------------------------

  // 2. We don't need to check explicitly whether the receiver is callable;
  // that is done by Call below.
  // 3. Tail call with no arguments if argArray is null or undefined.
  Label no_arguments;
  __ JumpIfRoot(r5, RootIndex::kNullValue, &no_arguments);
  __ JumpIfRoot(r5, RootIndex::kUndefinedValue, &no_arguments);

  // 4a. Apply the receiver to the given argArray.
  __ Jump(BUILTIN_CODE(masm->isolate(), CallWithArrayLike),
          RelocInfo::CODE_TARGET);

  // 4b. The argArray is either null or undefined, so we tail call without
  // any arguments to the receiver.
  __ bind(&no_arguments);
  {
    __ li(r3, Operand::Zero());
    __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
  }
}

// static
void Builtins::Generate_FunctionPrototypeCall(MacroAssembler* masm) {
  // 1. Make sure we have at least one argument.
  // r3: actual number of arguments
  {
    Label done;
    __ cmpi(r3, Operand::Zero());
    __ bne(&done);
    __ PushRoot(RootIndex::kUndefinedValue);
    __ addi(r3, r3, Operand(1));
    __ bind(&done);
  }

  // 2. Get the callable to call (passed as receiver) from the stack.
  __ ShiftLeftImm(r5, r3, Operand(kPointerSizeLog2));
  __ LoadPX(r4, MemOperand(sp, r5));

  // 3. Shift arguments one slot down on the stack (overwriting the original
  //    receiver), making the original first argument the new receiver.
  // r3: actual number of arguments
  // r4: callable
  {
    Label loop;
    // Calculate the copy start address (destination). Copy end address is sp.
    __ add(r5, sp, r5);

    __ mtctr(r3);
    __ bind(&loop);
    __ LoadP(ip, MemOperand(r5, -kPointerSize));
    __ StoreP(ip, MemOperand(r5));
    __ subi(r5, r5, Operand(kPointerSize));
    __ bdnz(&loop);
    // Adjust the actual number of arguments and remove the top element
    // (which is a copy of the last argument).
    __ subi(r3, r3, Operand(1));
    __ Drop(1);
  }

  // 4. Call the callable.
  __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
}
void Builtins::Generate_ReflectApply(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r3     : argc
  //  -- sp[0]  : argumentsList
  //  -- sp[4]  : thisArgument
  //  -- sp[8]  : target
  //  -- sp[12] : receiver
  // -----------------------------------

  // 1. Load target into r4 (if present), argumentsList into r5 (if present),
  // remove all arguments from the stack (including the receiver), and push
  // thisArgument (if present) instead.
  {
    Label skip;
    Register arg_size = r8;
    Register new_sp = r6;
    Register scratch = r7;
    __ ShiftLeftImm(arg_size, r3, Operand(kPointerSizeLog2));
    __ add(new_sp, sp, arg_size);
    __ LoadRoot(r4, RootIndex::kUndefinedValue);
    __ mr(scratch, r4);
    __ mr(r5, r4);
    __ cmpi(arg_size, Operand(kPointerSize));
    __ blt(&skip);
    __ LoadP(r4, MemOperand(new_sp, 1 * -kPointerSize));  // target
    __ beq(&skip);
    __ LoadP(scratch, MemOperand(new_sp, 2 * -kPointerSize));  // thisArgument
    __ cmpi(arg_size, Operand(2 * kPointerSize));
    __ beq(&skip);
    __ LoadP(r5, MemOperand(new_sp, 3 * -kPointerSize));  // argumentsList
    __ bind(&skip);
    __ mr(sp, new_sp);
    __ StoreP(scratch, MemOperand(sp, 0));
  }

  // ----------- S t a t e -------------
  //  -- r5    : argumentsList
  //  -- r4    : target
  //  -- sp[0] : thisArgument
  // -----------------------------------

  // 2. We don't need to check explicitly whether the target is callable;
  // that is done by Call below.
  // 3. Apply the target to the given argumentsList.
  __ Jump(BUILTIN_CODE(masm->isolate(), CallWithArrayLike),
          RelocInfo::CODE_TARGET);
}

void Builtins::Generate_ReflectConstruct(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r3     : argc
  //  -- sp[0]  : new.target (optional)
  //  -- sp[4]  : argumentsList
  //  -- sp[8]  : target
  //  -- sp[12] : receiver
  // -----------------------------------

  // 1. Load target into r4 (if present), argumentsList into r5 (if present),
  // new.target into r6 (if present, otherwise use target), remove all
  // arguments from the stack (including the receiver), and push undefined as
  // the receiver.
  {
    Label skip;
    Register arg_size = r8;
    Register new_sp = r7;
    __ ShiftLeftImm(arg_size, r3, Operand(kPointerSizeLog2));
    __ add(new_sp, sp, arg_size);
    __ LoadRoot(r4, RootIndex::kUndefinedValue);
    __ mr(r5, r4);
    __ mr(r6, r4);
    __ StoreP(r4, MemOperand(new_sp, 0));  // receiver (undefined)
    __ cmpi(arg_size, Operand(kPointerSize));
    __ blt(&skip);
    __ LoadP(r4, MemOperand(new_sp, 1 * -kPointerSize));  // target
    __ mr(r6, r4);  // new.target defaults to target
    __ beq(&skip);
    __ LoadP(r5, MemOperand(new_sp, 2 * -kPointerSize));  // argumentsList
    __ cmpi(arg_size, Operand(2 * kPointerSize));
    __ beq(&skip);
    __ LoadP(r6, MemOperand(new_sp, 3 * -kPointerSize));  // new.target
    __ bind(&skip);
    __ mr(sp, new_sp);
  }

  // ----------- S t a t e -------------
  //  -- r5    : argumentsList
  //  -- r6    : new.target
  //  -- r4    : target
  //  -- sp[0] : receiver (undefined)
  // -----------------------------------

  // 2. We don't need to check explicitly whether the target or new.target are
  // constructors; that is done by Construct below.
  // 3. Construct the target with the given new.target and argumentsList.
  __ Jump(BUILTIN_CODE(masm->isolate(), ConstructWithArrayLike),
          RelocInfo::CODE_TARGET);
}
static void EnterArgumentsAdaptorFrame(MacroAssembler* masm) {
  __ SmiTag(r3);
  __ mov(r7, Operand(StackFrame::TypeToMarker(StackFrame::ARGUMENTS_ADAPTOR)));
  __ mflr(r0);
  __ push(r0);
  if (FLAG_enable_embedded_constant_pool) {
    __ Push(fp, kConstantPoolRegister, r7, r4, r3);
  } else {
    __ Push(fp, r7, r4, r3);
  }
  __ Push(Smi::zero());  // Padding.
  __ addi(fp, sp,
          Operand(ArgumentsAdaptorFrameConstants::kFixedFrameSizeFromFp));
}

static void LeaveArgumentsAdaptorFrame(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r3 : result being passed through
  // -----------------------------------
  // Get the number of arguments passed (as a smi), tear down the frame and
  // then tear down the parameters.
  __ LoadP(r4, MemOperand(fp, ArgumentsAdaptorFrameConstants::kLengthOffset));
  int stack_adjustment = kPointerSize;  // adjust for receiver
  __ LeaveFrame(StackFrame::ARGUMENTS_ADAPTOR, stack_adjustment);
  __ SmiToPtrArrayOffset(r0, r4);
  __ add(sp, sp, r0);
}
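// An arguments adaptor frame sits between a caller that passed the "wrong"
// argument count and a callee with a fixed formal parameter count. Its length
// slot records the actual (Smi-tagged) argument count, which is what
// LeaveArgumentsAdaptorFrame uses to pop the caller-pushed arguments; the
// extra kPointerSize adjustment accounts for the receiver.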
// static
void Builtins::Generate_CallOrConstructVarargs(MacroAssembler* masm,
                                               Handle<Code> code) {
  // ----------- S t a t e -------------
  //  -- r4 : target
  //  -- r3 : number of parameters on the stack (not including the receiver)
  //  -- r5 : arguments list (a FixedArray)
  //  -- r7 : len (number of elements to push from args)
  //  -- r6 : new.target (for [[Construct]])
  // -----------------------------------

  Register scratch = ip;

  if (masm->emit_debug_code()) {
    // Allow r5 to be a FixedArray, or a FixedDoubleArray if r7 == 0.
    Label ok, fail;
    __ AssertNotSmi(r5);
    __ LoadP(scratch, FieldMemOperand(r5, HeapObject::kMapOffset));
    __ LoadHalfWord(scratch,
                    FieldMemOperand(scratch, Map::kInstanceTypeOffset));
    __ cmpi(scratch, Operand(FIXED_ARRAY_TYPE));
    __ beq(&ok);
    __ cmpi(scratch, Operand(FIXED_DOUBLE_ARRAY_TYPE));
    __ bne(&fail);
    __ cmpi(r7, Operand::Zero());
    __ beq(&ok);
    // Fall through.
    __ bind(&fail);
    __ Abort(AbortReason::kOperandIsNotAFixedArray);

    __ bind(&ok);
  }

  // Check for stack overflow.
  Label stack_overflow;
  Generate_StackOverflowCheck(masm, r7, ip, &stack_overflow);

  // Push arguments onto the stack (thisArgument is already on the stack).
  {
    Label loop, no_args, skip;
    __ cmpi(r7, Operand::Zero());
    __ beq(&no_args);
    __ addi(r5, r5,
            Operand(FixedArray::kHeaderSize - kHeapObjectTag - kPointerSize));
    __ mtctr(r7);
    __ bind(&loop);
    __ LoadPU(ip, MemOperand(r5, kPointerSize));
    __ CompareRoot(ip, RootIndex::kTheHoleValue);
    __ bne(&skip);
    __ LoadRoot(ip, RootIndex::kUndefinedValue);
    __ bind(&skip);
    __ push(ip);
    __ bdnz(&loop);
    __ bind(&no_args);
    __ add(r3, r3, r7);
  }

  // Tail-call to the actual Call or Construct builtin.
  __ Jump(code, RelocInfo::CODE_TARGET);

  __ bind(&stack_overflow);
  __ TailCallRuntime(Runtime::kThrowStackOverflow);
}
// static
void Builtins::Generate_CallOrConstructForwardVarargs(MacroAssembler* masm,
                                                      CallOrConstructMode mode,
                                                      Handle<Code> code) {
  // ----------- S t a t e -------------
  //  -- r3 : the number of arguments (not including the receiver)
  //  -- r6 : the new.target (for [[Construct]] calls)
  //  -- r4 : the target to call (can be any Object)
  //  -- r5 : start index (to support rest parameters)
  // -----------------------------------

  Register scratch = r9;

  if (mode == CallOrConstructMode::kConstruct) {
    Label new_target_constructor, new_target_not_constructor;
    __ JumpIfSmi(r6, &new_target_not_constructor);
    __ LoadP(scratch, FieldMemOperand(r6, HeapObject::kMapOffset));
    __ lbz(scratch, FieldMemOperand(scratch, Map::kBitFieldOffset));
    __ TestBit(scratch, Map::IsConstructorBit::kShift, r0);
    __ bne(&new_target_constructor, cr0);
    __ bind(&new_target_not_constructor);
    {
      FrameScope scope(masm, StackFrame::MANUAL);
      __ EnterFrame(StackFrame::INTERNAL);
      __ Push(r6);
      __ CallRuntime(Runtime::kThrowNotConstructor);
    }
    __ bind(&new_target_constructor);
  }

  // Check if we have an arguments adaptor frame below the function frame.
  Label arguments_adaptor, arguments_done;
  __ LoadP(r7, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
  __ LoadP(ip, MemOperand(r7, CommonFrameConstants::kContextOrFrameTypeOffset));
  __ cmpi(ip, Operand(StackFrame::TypeToMarker(StackFrame::ARGUMENTS_ADAPTOR)));
  __ beq(&arguments_adaptor);
  {
    __ LoadP(r8, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
    __ LoadP(r8, FieldMemOperand(r8, JSFunction::kSharedFunctionInfoOffset));
    __ LoadHalfWord(
        r8,
        FieldMemOperand(r8, SharedFunctionInfo::kFormalParameterCountOffset));
    __ mr(r7, fp);
  }
  __ b(&arguments_done);
  __ bind(&arguments_adaptor);
  {
    // Load the length from the ArgumentsAdaptorFrame.
    __ LoadP(r8, MemOperand(r7, ArgumentsAdaptorFrameConstants::kLengthOffset));
    __ SmiUntag(r8);
  }
  __ bind(&arguments_done);

  Label stack_done, stack_overflow;
  __ sub(r8, r8, r5);
  __ cmpi(r8, Operand::Zero());
  __ ble(&stack_done);
  {
    // Check for stack overflow.
    Generate_StackOverflowCheck(masm, r8, r5, &stack_overflow);

    // Forward the arguments from the caller frame.
    {
      Label loop;
      __ addi(r7, r7, Operand(kPointerSize));
      __ add(r3, r3, r8);
      __ bind(&loop);
      {
        __ ShiftLeftImm(ip, r8, Operand(kPointerSizeLog2));
        __ LoadPX(ip, MemOperand(r7, ip));
        __ push(ip);
        __ subi(r8, r8, Operand(1));
        __ cmpi(r8, Operand::Zero());
        __ bne(&loop);
      }
    }
  }
  __ b(&stack_done);
  __ bind(&stack_overflow);
  __ TailCallRuntime(Runtime::kThrowStackOverflow);
  __ bind(&stack_done);

  // Tail-call to the {code} handler.
  __ Jump(code, RelocInfo::CODE_TARGET);
}
// static
void Builtins::Generate_CallFunction(MacroAssembler* masm,
                                     ConvertReceiverMode mode) {
  // ----------- S t a t e -------------
  //  -- r3 : the number of arguments (not including the receiver)
  //  -- r4 : the function to call (checked to be a JSFunction)
  // -----------------------------------
  __ AssertFunction(r4);

  // See ES6 section 9.2.1 [[Call]] ( thisArgument, argumentsList)
  // Check that function is not a "classConstructor".
  Label class_constructor;
  __ LoadP(r5, FieldMemOperand(r4, JSFunction::kSharedFunctionInfoOffset));
  __ lwz(r6, FieldMemOperand(r5, SharedFunctionInfo::kFlagsOffset));
  __ TestBitMask(r6, SharedFunctionInfo::IsClassConstructorBit::kMask, r0);
  __ bne(&class_constructor, cr0);

  // Enter the context of the function; ToObject has to run in the function
  // context, and we also need to take the global proxy from the function
  // context in case of conversion.
  __ LoadP(cp, FieldMemOperand(r4, JSFunction::kContextOffset));
  // We need to convert the receiver for non-native sloppy mode functions.
  Label done_convert;
  __ andi(r0, r6,
          Operand(SharedFunctionInfo::IsStrictBit::kMask |
                  SharedFunctionInfo::IsNativeBit::kMask));
  __ bne(&done_convert, cr0);
  {
    // ----------- S t a t e -------------
    //  -- r3 : the number of arguments (not including the receiver)
    //  -- r4 : the function to call (checked to be a JSFunction)
    //  -- r5 : the shared function info.
    //  -- cp : the function context.
    // -----------------------------------

    if (mode == ConvertReceiverMode::kNullOrUndefined) {
      // Patch receiver to global proxy.
      __ LoadGlobalProxy(r6);
    } else {
      Label convert_to_object, convert_receiver;
      __ ShiftLeftImm(r6, r3, Operand(kPointerSizeLog2));
      __ LoadPX(r6, MemOperand(sp, r6));
      __ JumpIfSmi(r6, &convert_to_object);
      STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
      __ CompareObjectType(r6, r7, r7, FIRST_JS_RECEIVER_TYPE);
      __ bge(&done_convert);
      if (mode != ConvertReceiverMode::kNotNullOrUndefined) {
        Label convert_global_proxy;
        __ JumpIfRoot(r6, RootIndex::kUndefinedValue, &convert_global_proxy);
        __ JumpIfNotRoot(r6, RootIndex::kNullValue, &convert_to_object);
        __ bind(&convert_global_proxy);
        {
          // Patch receiver to global proxy.
          __ LoadGlobalProxy(r6);
        }
        __ b(&convert_receiver);
      }
      __ bind(&convert_to_object);
      {
        // Convert receiver using ToObject.
        FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
        __ SmiTag(r3);
        __ Push(r3, r4);
        __ mr(r3, r6);
        __ Push(cp);
        __ Call(BUILTIN_CODE(masm->isolate(), ToObject),
                RelocInfo::CODE_TARGET);
        __ Pop(cp);
        __ mr(r6, r3);
        __ Pop(r3, r4);
        __ SmiUntag(r3);
      }
      __ LoadP(r5, FieldMemOperand(r4, JSFunction::kSharedFunctionInfoOffset));
      __ bind(&convert_receiver);
    }
    __ ShiftLeftImm(r7, r3, Operand(kPointerSizeLog2));
    __ StorePX(r6, MemOperand(sp, r7));
  }
  __ bind(&done_convert);

  // ----------- S t a t e -------------
  //  -- r3 : the number of arguments (not including the receiver)
  //  -- r4 : the function to call (checked to be a JSFunction)
  //  -- r5 : the shared function info.
  //  -- cp : the function context.
  // -----------------------------------

  __ LoadHalfWord(
      r5, FieldMemOperand(r5, SharedFunctionInfo::kFormalParameterCountOffset));
  ParameterCount actual(r3);
  ParameterCount expected(r5);
  __ InvokeFunctionCode(r4, no_reg, expected, actual, JUMP_FUNCTION);

  // The function is a "classConstructor", need to raise an exception.
  __ bind(&class_constructor);
  {
    FrameAndConstantPoolScope frame(masm, StackFrame::INTERNAL);
    __ push(r4);
    __ CallRuntime(Runtime::kThrowConstructorNonCallableError);
  }
}
namespace {

void Generate_PushBoundArguments(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r3 : the number of arguments (not including the receiver)
  //  -- r4 : target (checked to be a JSBoundFunction)
  //  -- r6 : new.target (only in case of [[Construct]])
  // -----------------------------------

  // Load [[BoundArguments]] into r5 and its length into r7.
  Label no_bound_arguments;
  __ LoadP(r5, FieldMemOperand(r4, JSBoundFunction::kBoundArgumentsOffset));
  __ LoadP(r7, FieldMemOperand(r5, FixedArray::kLengthOffset));
  __ SmiUntag(r7, SetRC);
  __ beq(&no_bound_arguments, cr0);
  {
    // ----------- S t a t e -------------
    //  -- r5 : the [[BoundArguments]] (implemented as FixedArray)
    //  -- r7 : the number of [[BoundArguments]]
    // -----------------------------------

    // Reserve stack space for the [[BoundArguments]].
    {
      Label done;
      __ mr(r9, sp);  // preserve previous stack pointer
      __ ShiftLeftImm(r10, r7, Operand(kPointerSizeLog2));
      __ sub(sp, sp, r10);
      // Check the stack for overflow. We are not trying to catch
      // interruptions (i.e. debug break and preemption) here, so check the
      // "real stack limit".
      __ CompareRoot(sp, RootIndex::kRealStackLimit);
      __ bgt(&done);  // Signed comparison.
      // Restore the stack pointer.
      __ mr(sp, r9);
      {
        FrameScope scope(masm, StackFrame::MANUAL);
        __ EnterFrame(StackFrame::INTERNAL);
        __ CallRuntime(Runtime::kThrowStackOverflow);
      }
      __ bind(&done);
    }

    // Relocate arguments down the stack.
    //  -- r3 : the number of arguments (not including the receiver)
    //  -- r9 : the previous stack pointer
    //  -- r10: the size of the [[BoundArguments]]
    {
      Label skip, loop;
      __ li(r8, Operand::Zero());
      __ cmpi(r3, Operand::Zero());
      __ beq(&skip);
      __ mtctr(r3);
      __ bind(&loop);
      __ LoadPX(r0, MemOperand(r9, r8));
      __ StorePX(r0, MemOperand(sp, r8));
      __ addi(r8, r8, Operand(kPointerSize));
      __ bdnz(&loop);
      __ bind(&skip);
    }

    // Copy [[BoundArguments]] to the stack (below the arguments).
    {
      Label loop;
      __ addi(r5, r5, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
      __ add(r5, r5, r10);
      __ mtctr(r7);
      __ bind(&loop);
      __ LoadPU(r0, MemOperand(r5, -kPointerSize));
      __ StorePX(r0, MemOperand(sp, r8));
      __ addi(r8, r8, Operand(kPointerSize));
      __ bdnz(&loop);

      // Adjust effective number of arguments.
      __ add(r3, r3, r7);
    }
  }
  __ bind(&no_bound_arguments);
}

}  // namespace
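// Stack layout produced above: the existing arguments are slid down by the
// byte size of [[BoundArguments]], the bound arguments are copied into the
// resulting gap (walking the FixedArray backwards so ordering is preserved),
// and r3 is bumped so callees observe one contiguous argument list.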
// static
void Builtins::Generate_CallBoundFunctionImpl(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r3 : the number of arguments (not including the receiver)
  //  -- r4 : the function to call (checked to be a JSBoundFunction)
  // -----------------------------------
  __ AssertBoundFunction(r4);

  // Patch the receiver to [[BoundThis]].
  __ LoadP(ip, FieldMemOperand(r4, JSBoundFunction::kBoundThisOffset));
  __ ShiftLeftImm(r0, r3, Operand(kPointerSizeLog2));
  __ StorePX(ip, MemOperand(sp, r0));

  // Push the [[BoundArguments]] onto the stack.
  Generate_PushBoundArguments(masm);

  // Call the [[BoundTargetFunction]] via the Call builtin.
  __ LoadP(r4,
           FieldMemOperand(r4, JSBoundFunction::kBoundTargetFunctionOffset));
  __ Jump(BUILTIN_CODE(masm->isolate(), Call_ReceiverIsAny),
          RelocInfo::CODE_TARGET);
}

// static
void Builtins::Generate_Call(MacroAssembler* masm, ConvertReceiverMode mode) {
  // ----------- S t a t e -------------
  //  -- r3 : the number of arguments (not including the receiver)
  //  -- r4 : the target to call (can be any Object).
  // -----------------------------------

  Label non_callable, non_function, non_smi;
  __ JumpIfSmi(r4, &non_callable);
  __ bind(&non_smi);
  __ CompareObjectType(r4, r7, r8, JS_FUNCTION_TYPE);
  __ Jump(masm->isolate()->builtins()->CallFunction(mode),
          RelocInfo::CODE_TARGET, eq);
  __ cmpi(r8, Operand(JS_BOUND_FUNCTION_TYPE));
  __ Jump(BUILTIN_CODE(masm->isolate(), CallBoundFunction),
          RelocInfo::CODE_TARGET, eq);

  // Check if target has a [[Call]] internal method.
  __ lbz(r7, FieldMemOperand(r7, Map::kBitFieldOffset));
  __ TestBit(r7, Map::IsCallableBit::kShift, r0);
  __ beq(&non_callable, cr0);

  // Check if target is a proxy and call CallProxy external builtin.
  __ cmpi(r8, Operand(JS_PROXY_TYPE));
  __ bne(&non_function);
  __ Jump(BUILTIN_CODE(masm->isolate(), CallProxy), RelocInfo::CODE_TARGET);

  // 2. Call to something else, which might have a [[Call]] internal method
  // (if not we raise an exception).
  __ bind(&non_function);
  // Overwrite the original receiver with the (original) target.
  __ ShiftLeftImm(r8, r3, Operand(kPointerSizeLog2));
  __ StorePX(r4, MemOperand(sp, r8));
  // Let the "call_as_function_delegate" take care of the rest.
  __ LoadNativeContextSlot(Context::CALL_AS_FUNCTION_DELEGATE_INDEX, r4);
  __ Jump(masm->isolate()->builtins()->CallFunction(
              ConvertReceiverMode::kNotNullOrUndefined),
          RelocInfo::CODE_TARGET);

  // 3. Call to something that is not callable.
  __ bind(&non_callable);
  {
    FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
    __ Push(r4);
    __ CallRuntime(Runtime::kThrowCalledNonCallable);
  }
}
// static
void Builtins::Generate_ConstructFunction(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r3 : the number of arguments (not including the receiver)
  //  -- r4 : the constructor to call (checked to be a JSFunction)
  //  -- r6 : the new target (checked to be a constructor)
  // -----------------------------------
  __ AssertConstructor(r4);
  __ AssertFunction(r4);

  // Calling convention for function specific ConstructStubs requires
  // r5 to contain either an AllocationSite or undefined.
  __ LoadRoot(r5, RootIndex::kUndefinedValue);

  Label call_generic_stub;

  // Jump to JSBuiltinsConstructStub or JSConstructStubGeneric.
  __ LoadP(r7, FieldMemOperand(r4, JSFunction::kSharedFunctionInfoOffset));
  __ lwz(r7, FieldMemOperand(r7, SharedFunctionInfo::kFlagsOffset));
  __ mov(ip, Operand(SharedFunctionInfo::ConstructAsBuiltinBit::kMask));
  __ and_(r7, r7, ip, SetRC);
  __ beq(&call_generic_stub, cr0);

  __ Jump(BUILTIN_CODE(masm->isolate(), JSBuiltinsConstructStub),
          RelocInfo::CODE_TARGET);

  __ bind(&call_generic_stub);
  __ Jump(BUILTIN_CODE(masm->isolate(), JSConstructStubGeneric),
          RelocInfo::CODE_TARGET);
}

// static
void Builtins::Generate_ConstructBoundFunction(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r3 : the number of arguments (not including the receiver)
  //  -- r4 : the function to call (checked to be a JSBoundFunction)
  //  -- r6 : the new target (checked to be a constructor)
  // -----------------------------------
  __ AssertConstructor(r4);
  __ AssertBoundFunction(r4);

  // Push the [[BoundArguments]] onto the stack.
  Generate_PushBoundArguments(masm);

  // Patch new.target to [[BoundTargetFunction]] if new.target equals target.
  Label skip;
  __ cmp(r4, r6);
  __ bne(&skip);
  __ LoadP(r6,
           FieldMemOperand(r4, JSBoundFunction::kBoundTargetFunctionOffset));
  __ bind(&skip);

  // Construct the [[BoundTargetFunction]] via the Construct builtin.
  __ LoadP(r4,
           FieldMemOperand(r4, JSBoundFunction::kBoundTargetFunctionOffset));
  __ Jump(BUILTIN_CODE(masm->isolate(), Construct), RelocInfo::CODE_TARGET);
}

// static
void Builtins::Generate_Construct(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r3 : the number of arguments (not including the receiver)
  //  -- r4 : the constructor to call (can be any Object)
  //  -- r6 : the new target (either the same as the constructor or
  //          the JSFunction on which new was invoked initially)
  // -----------------------------------

  // Check if target is a Smi.
  Label non_constructor, non_proxy;
  __ JumpIfSmi(r4, &non_constructor);

  // Check if target has a [[Construct]] internal method.
  __ LoadP(r7, FieldMemOperand(r4, HeapObject::kMapOffset));
  __ lbz(r5, FieldMemOperand(r7, Map::kBitFieldOffset));
  __ TestBit(r5, Map::IsConstructorBit::kShift, r0);
  __ beq(&non_constructor, cr0);

  // Dispatch based on instance type.
  __ CompareInstanceType(r7, r8, JS_FUNCTION_TYPE);
  __ Jump(BUILTIN_CODE(masm->isolate(), ConstructFunction),
          RelocInfo::CODE_TARGET, eq);

  // Only dispatch to bound functions after checking whether they are
  // constructors.
  __ cmpi(r8, Operand(JS_BOUND_FUNCTION_TYPE));
  __ Jump(BUILTIN_CODE(masm->isolate(), ConstructBoundFunction),
          RelocInfo::CODE_TARGET, eq);

  // Only dispatch to proxies after checking whether they are constructors.
  __ cmpi(r8, Operand(JS_PROXY_TYPE));
  __ bne(&non_proxy);
  __ Jump(BUILTIN_CODE(masm->isolate(), ConstructProxy),
          RelocInfo::CODE_TARGET);

  // Called Construct on an exotic Object with a [[Construct]] internal
  // method.
  __ bind(&non_proxy);
  {
    // Overwrite the original receiver with the (original) target.
    __ ShiftLeftImm(r8, r3, Operand(kPointerSizeLog2));
    __ StorePX(r4, MemOperand(sp, r8));
    // Let the "call_as_constructor_delegate" take care of the rest.
    __ LoadNativeContextSlot(Context::CALL_AS_CONSTRUCTOR_DELEGATE_INDEX, r4);
    __ Jump(masm->isolate()->builtins()->CallFunction(),
            RelocInfo::CODE_TARGET);
  }

  // Called Construct on an Object that doesn't have a [[Construct]] internal
  // method.
  __ bind(&non_constructor);
  __ Jump(BUILTIN_CODE(masm->isolate(), ConstructedNonConstructable),
          RelocInfo::CODE_TARGET);
}
void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r3 : actual number of arguments
  //  -- r4 : function (passed through to callee)
  //  -- r5 : expected number of arguments
  //  -- r6 : new target (passed through to callee)
  // -----------------------------------

  Label invoke, dont_adapt_arguments, stack_overflow;

  Label enough, too_few;
  __ cmpli(r5, Operand(SharedFunctionInfo::kDontAdaptArgumentsSentinel));
  __ beq(&dont_adapt_arguments);
  __ cmp(r3, r5);
  __ blt(&too_few);

  {  // Enough parameters: actual >= expected
    __ bind(&enough);
    EnterArgumentsAdaptorFrame(masm);
    Generate_StackOverflowCheck(masm, r5, r8, &stack_overflow);

    // Calculate copy start address into r3 and copy end address into r7.
    // r3: actual number of arguments as a smi
    __ SmiToPtrArrayOffset(r3, r3);
    __ add(r3, r3, fp);
    // Adjust for return address and receiver.
    __ addi(r3, r3, Operand(2 * kPointerSize));
    __ ShiftLeftImm(r7, r5, Operand(kPointerSizeLog2));
    __ sub(r7, r3, r7);

    // Copy the arguments (including the receiver) to the new stack frame.
    Label copy;
    __ bind(&copy);
    __ LoadP(r0, MemOperand(r3, 0));
    __ push(r0);
    __ cmp(r3, r7);  // Compare before moving to next argument.
    __ subi(r3, r3, Operand(kPointerSize));
    __ bne(&copy);

    __ b(&invoke);
  }

  {  // Too few parameters: actual < expected
    __ bind(&too_few);

    EnterArgumentsAdaptorFrame(masm);
    Generate_StackOverflowCheck(masm, r5, r8, &stack_overflow);

    // Calculate copy start address into r3; copy end address is fp.
    // r3: actual number of arguments as a smi
    __ SmiToPtrArrayOffset(r3, r3);
    __ add(r3, r3, fp);

    // Copy the arguments (including the receiver) to the new stack frame.
    Label copy;
    __ bind(&copy);
    // Adjust load for return address and receiver.
    __ LoadP(r0, MemOperand(r3, 2 * kPointerSize));
    __ push(r0);
    __ cmp(r3, fp);  // Compare before moving to next argument.
    __ subi(r3, r3, Operand(kPointerSize));
    __ bne(&copy);

    // Fill the remaining expected arguments with undefined.
    __ LoadRoot(r0, RootIndex::kUndefinedValue);
    __ ShiftLeftImm(r7, r5, Operand(kPointerSizeLog2));
    __ sub(r7, fp, r7);
    // Adjust for frame.
    __ subi(r7, r7,
            Operand(ArgumentsAdaptorFrameConstants::kFixedFrameSizeFromFp +
                    kPointerSize));

    Label fill;
    __ bind(&fill);
    __ push(r0);
    __ cmp(sp, r7);
    __ bne(&fill);
  }

  // Call the entry point.
  __ bind(&invoke);
  __ mr(r3, r5);
  // r3 : expected number of arguments
  // r4 : function (passed through to callee)
  // r6 : new target (passed through to callee)
  static_assert(kJavaScriptCallCodeStartRegister == r5, "ABI mismatch");
  __ LoadP(r5, FieldMemOperand(r4, JSFunction::kCodeOffset));
  __ addi(r5, r5, Operand(Code::kHeaderSize - kHeapObjectTag));
  __ CallJSEntry(r5);

  // Store offset of return address for deoptimizer.
  masm->isolate()->heap()->SetArgumentsAdaptorDeoptPCOffset(masm->pc_offset());

  // Exit frame and return.
  LeaveArgumentsAdaptorFrame(masm);
  __ blr();

  // -------------------------------------------
  // Don't adapt arguments.
  // -------------------------------------------
  __ bind(&dont_adapt_arguments);
  static_assert(kJavaScriptCallCodeStartRegister == r5, "ABI mismatch");
  __ LoadP(r5, FieldMemOperand(r4, JSFunction::kCodeOffset));
  __ addi(r5, r5, Operand(Code::kHeaderSize - kHeapObjectTag));
  __ JumpToJSEntry(r5);

  __ bind(&stack_overflow);
  {
    FrameScope frame(masm, StackFrame::MANUAL);
    __ CallRuntime(Runtime::kThrowStackOverflow);
    __ bkpt(0);
  }
}
void Builtins::Generate_WasmCompileLazy(MacroAssembler* masm) {
  // The function index was put in a register by the jump table trampoline.
  // Convert to Smi for the runtime call.
  __ SmiTag(kWasmCompileLazyFuncIndexRegister,
            kWasmCompileLazyFuncIndexRegister);
  {
    HardAbortScope hard_abort(masm);  // Avoid calls to Abort.
    FrameAndConstantPoolScope scope(masm, StackFrame::WASM_COMPILE_LAZY);

    // Save all parameter registers (see wasm-linkage.cc). They might be
    // overwritten in the runtime call below. We don't have any callee-saved
    // registers in wasm, so no need to store anything else.
    constexpr RegList gp_regs =
        Register::ListOf<r3, r4, r5, r6, r7, r8, r9, r10>();
    constexpr RegList fp_regs =
        DoubleRegister::ListOf<d1, d2, d3, d4, d5, d6, d7, d8>();
    __ MultiPush(gp_regs);
    __ MultiPushDoubles(fp_regs);

    // Pass instance and function index as explicit arguments to the runtime
    // function.
    __ Push(kWasmInstanceRegister, kWasmCompileLazyFuncIndexRegister);
    // Load the correct CEntry builtin from the instance object.
    __ LoadP(r5, FieldMemOperand(kWasmInstanceRegister,
                                 WasmInstanceObject::kCEntryStubOffset));
    // Initialize the JavaScript context with 0. CEntry will use it to
    // set the current context on the isolate.
    __ LoadSmiLiteral(cp, Smi::zero());
    __ CallRuntimeWithCEntry(Runtime::kWasmCompileLazy, r5);
    // The entrypoint address is the return value.
    __ mr(r11, kReturnRegister0);

    // Restore registers.
    __ MultiPopDoubles(fp_regs);
    __ MultiPop(gp_regs);
  }
  // Finally, jump to the entrypoint.
  __ Jump(r11);
}
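// Lazy Wasm compilation round-trips through the runtime exactly once per
// function: every parameter register is saved around the
// Runtime::kWasmCompileLazy call because the runtime may clobber them, and
// the returned entrypoint is then tail-jumped so the freshly compiled code
// sees the original arguments unchanged.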
void Builtins::Generate_CEntry(MacroAssembler* masm, int result_size,
                               SaveFPRegsMode save_doubles, ArgvMode argv_mode,
                               bool builtin_exit_frame) {
  // Called from JavaScript; parameters are on stack as if calling JS function.
  // r3: number of arguments including receiver
  // r4: pointer to builtin function
  // fp: frame pointer  (restored after C call)
  // sp: stack pointer  (restored as callee's sp after C call)
  // cp: current context  (C callee-saved)
  //
  // If argv_mode == kArgvInRegister:
  // r5: pointer to the first argument

  __ mr(r15, r4);

  if (argv_mode == kArgvInRegister) {
    // Move argv into the correct register.
    __ mr(r4, r5);
  } else {
    // Compute the argv pointer.
    __ ShiftLeftImm(r4, r3, Operand(kPointerSizeLog2));
    __ add(r4, r4, sp);
    __ subi(r4, r4, Operand(kPointerSize));
  }

  // Enter the exit frame that transitions from JavaScript to C++.
  FrameScope scope(masm, StackFrame::MANUAL);

  // Need at least one extra slot for return address location.
  int arg_stack_space = 1;

  // Pass buffer for return value on stack if necessary.
  bool needs_return_buffer =
      (result_size == 2 && !ABI_RETURNS_OBJECT_PAIRS_IN_REGS);
  if (needs_return_buffer) {
    arg_stack_space += result_size;
  }

  __ EnterExitFrame(
      save_doubles, arg_stack_space,
      builtin_exit_frame ? StackFrame::BUILTIN_EXIT : StackFrame::EXIT);

  // Store a copy of argc in callee-saved registers for later.
  __ mr(r14, r3);

  // r3, r14: number of arguments including receiver  (C callee-saved)
  // r4: pointer to the first argument
  // r15: pointer to builtin function  (C callee-saved)

  // Result returned in registers or stack, depending on result size and ABI.
  Register isolate_reg = r5;
  if (needs_return_buffer) {
    // The return value is a non-scalar value.
    // Use frame storage reserved by calling function to pass return
    // buffer as implicit first argument.
    __ mr(r5, r4);
    __ mr(r4, r3);
    __ addi(r3, sp, Operand((kStackFrameExtraParamSlot + 1) * kPointerSize));
    isolate_reg = r6;
  }

  // Call C built-in.
  __ Move(isolate_reg, ExternalReference::isolate_address(masm->isolate()));

  Register target = r15;
  if (ABI_USES_FUNCTION_DESCRIPTORS) {
    // AIX/PPC64BE Linux use a function descriptor.
    __ LoadP(ToRegister(ABI_TOC_REGISTER), MemOperand(r15, kPointerSize));
    __ LoadP(ip, MemOperand(r15, 0));  // Instruction address
    target = ip;
  } else if (ABI_CALL_VIA_IP) {
    __ Move(ip, r15);
    target = ip;
  }

  // To let the GC traverse the return address of the exit frames, we need to
  // know where the return address is. The CEntry is unmovable, so we can
  // store the address on the stack to be able to find it again and we never
  // have to restore it, because it will not change.
  Label start_call;
  constexpr int after_call_offset = 5 * kInstrSize;
  DCHECK_NE(r7, target);
  __ LoadPC(r7);
  __ bind(&start_call);
  __ addi(r7, r7, Operand(after_call_offset));
  __ StoreP(r7, MemOperand(sp, kStackFrameExtraParamSlot * kPointerSize));
  __ Call(target);
  DCHECK_EQ(after_call_offset - kInstrSize,
            __ SizeOfCodeGeneratedSince(&start_call));

  // If return value is on the stack, pop it to registers.
  if (needs_return_buffer) {
    __ LoadP(r4, MemOperand(r3, kPointerSize));
    __ LoadP(r3, MemOperand(r3));
  }

  // Check result for exception sentinel.
  Label exception_returned;
  __ CompareRoot(r3, RootIndex::kException);
  __ beq(&exception_returned);

  // Check that there is no pending exception, otherwise we
  // should have returned the exception sentinel.
  if (FLAG_debug_code) {
    Label okay;
    ExternalReference pending_exception_address = ExternalReference::Create(
        IsolateAddressId::kPendingExceptionAddress, masm->isolate());

    __ Move(r6, pending_exception_address);
    __ LoadP(r6, MemOperand(r6));
    __ CompareRoot(r6, RootIndex::kTheHoleValue);
    // Cannot use check here as it attempts to generate call into runtime.
    __ beq(&okay);
    __ stop("Unexpected pending exception");
    __ bind(&okay);
  }

  // Exit C frame and return.
  // r3:r4: result
  // sp: stack pointer
  // fp: frame pointer
  Register argc = argv_mode == kArgvInRegister
                      // We don't want to pop arguments so set argc to no_reg.
                      ? no_reg
                      // r14: still holds argc (C callee-saved).
                      : r14;
  __ LeaveExitFrame(save_doubles, argc);
  __ blr();

  // Handling of exception.
  __ bind(&exception_returned);

  ExternalReference pending_handler_context_address = ExternalReference::Create(
      IsolateAddressId::kPendingHandlerContextAddress, masm->isolate());
  ExternalReference pending_handler_entrypoint_address =
      ExternalReference::Create(
          IsolateAddressId::kPendingHandlerEntrypointAddress, masm->isolate());
  ExternalReference pending_handler_constant_pool_address =
      ExternalReference::Create(
          IsolateAddressId::kPendingHandlerConstantPoolAddress,
          masm->isolate());
  ExternalReference pending_handler_fp_address = ExternalReference::Create(
      IsolateAddressId::kPendingHandlerFPAddress, masm->isolate());
  ExternalReference pending_handler_sp_address = ExternalReference::Create(
      IsolateAddressId::kPendingHandlerSPAddress, masm->isolate());

  // Ask the runtime for help to determine the handler. This will set r3 to
  // contain the current pending exception, don't clobber it.
  ExternalReference find_handler =
      ExternalReference::Create(Runtime::kUnwindAndFindExceptionHandler);
  {
    FrameScope scope(masm, StackFrame::MANUAL);
    __ PrepareCallCFunction(3, 0, r3);
    __ li(r3, Operand::Zero());
    __ li(r4, Operand::Zero());
    __ Move(r5, ExternalReference::isolate_address(masm->isolate()));
    __ CallCFunction(find_handler, 3);
  }

  // Retrieve the handler context, SP and FP.
  __ Move(cp, pending_handler_context_address);
  __ LoadP(cp, MemOperand(cp));
  __ Move(sp, pending_handler_sp_address);
  __ LoadP(sp, MemOperand(sp));
  __ Move(fp, pending_handler_fp_address);
  __ LoadP(fp, MemOperand(fp));

  // If the handler is a JS frame, restore the context to the frame. Note that
  // the context will be set to (cp == 0) for non-JS frames.
  Label skip;
  __ cmpi(cp, Operand::Zero());
  __ beq(&skip);
  __ StoreP(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
  __ bind(&skip);

  // Reset the masking register. This is done independent of the underlying
  // feature flag {FLAG_untrusted_code_mitigations} to make the snapshot work
  // with both configurations.
  __ ResetSpeculationPoisonRegister();

  // Compute the handler entry address and jump to it.
  ConstantPoolUnavailableScope constant_pool_unavailable(masm);
  __ Move(ip, pending_handler_entrypoint_address);
  __ LoadP(ip, MemOperand(ip));
  if (FLAG_enable_embedded_constant_pool) {
    __ Move(kConstantPoolRegister, pending_handler_constant_pool_address);
    __ LoadP(kConstantPoolRegister, MemOperand(kConstantPoolRegister));
  }
  __ Jump(ip);
}
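// On this exception path, Runtime::kUnwindAndFindExceptionHandler has already
// unwound the stack and deposited the handler's context, fp, sp, entrypoint
// (and, optionally, constant pool) in per-isolate slots; the code above only
// reloads those slots and jumps, recomputing no frame state of its own.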
void Builtins::Generate_DoubleToI(MacroAssembler* masm) {
  Label out_of_range, only_low, negate, done, fastpath_done;
  Register result_reg = r3;

  HardAbortScope hard_abort(masm);  // Avoid calls to Abort.

  // Immediate values for this stub fit in instructions, so it's safe to use
  // ip.
  Register scratch = GetRegisterThatIsNotOneOf(result_reg);
  Register scratch_low = GetRegisterThatIsNotOneOf(result_reg, scratch);
  Register scratch_high =
      GetRegisterThatIsNotOneOf(result_reg, scratch, scratch_low);
  DoubleRegister double_scratch = kScratchDoubleReg;

  __ Push(result_reg, scratch);
  // Account for saved regs.
  int argument_offset = 2 * kPointerSize;

  // Load double input.
  __ lfd(double_scratch, MemOperand(sp, argument_offset));

  // Do fast-path convert from double to int.
  __ ConvertDoubleToInt64(double_scratch,
#if !V8_TARGET_ARCH_PPC64
                          scratch,
#endif
                          result_reg, d0);

// Test for overflow
#if V8_TARGET_ARCH_PPC64
  __ TestIfInt32(result_reg, r0);
#else
  __ TestIfInt32(scratch, result_reg, r0);
#endif
  __ beq(&fastpath_done);

  __ Push(scratch_high, scratch_low);
  // Account for saved regs.
  argument_offset += 2 * kPointerSize;

  __ lwz(scratch_high,
         MemOperand(sp, argument_offset + Register::kExponentOffset));
  __ lwz(scratch_low,
         MemOperand(sp, argument_offset + Register::kMantissaOffset));

  __ ExtractBitMask(scratch, scratch_high, HeapNumber::kExponentMask);
  // Load scratch with exponent - 1. This is faster than loading
  // with exponent because Bias + 1 = 1024 which is a *PPC* immediate value.
  STATIC_ASSERT(HeapNumber::kExponentBias + 1 == 1024);
  __ subi(scratch, scratch, Operand(HeapNumber::kExponentBias + 1));
  // If exponent is greater than or equal to 83, the 32 less significant
  // bits are 0s (2^83 = 1, 52 significant bits, 31 0s), the result is 0.
  // Compare exponent with 83 (compare exponent - 1 with 82).
  __ cmpi(scratch, Operand(83));
  __ bge(&out_of_range);

  // If we reach this code, 30 <= exponent <= 83. We know the fast path above
  // handled any input that converts exactly, so only shifting work remains.
  // Compute 52 - exponent (as 51 - (exponent - 1)); a non-positive value
  // means only scratch_low contributes significant bits.
  __ subfic(scratch, scratch, Operand(51));
  __ cmpi(scratch, Operand::Zero());
  __ ble(&only_low);
  // 21 <= exponent <= 51: shift scratch_low and scratch_high to generate the
  // result.
  __ srw(scratch_low, scratch_low, scratch);
  // scratch contains: 52 - exponent; we need exponent - 20, which is
  // 32 - scratch.
  __ subfic(scratch, scratch, Operand(32));
  __ ExtractBitMask(result_reg, scratch_high, HeapNumber::kMantissaMask);
  // Set the implicit 1 before the mantissa part in scratch_high.
  STATIC_ASSERT(HeapNumber::kMantissaBitsInTopWord >= 16);
  __ oris(result_reg, result_reg,
          Operand(1 << ((HeapNumber::kMantissaBitsInTopWord)-16)));
  __ slw(r0, result_reg, scratch);
  __ orx(result_reg, scratch_low, r0);
  __ b(&negate);

  __ bind(&out_of_range);
  __ mov(result_reg, Operand::Zero());
  __ b(&done);

  __ bind(&only_low);
  // 52 <= exponent <= 83: shift only scratch_low.
  // On entry, scratch contains 52 - exponent (non-positive).
  __ neg(scratch, scratch);
  __ slw(result_reg, scratch_low, scratch);

  __ bind(&negate);
  // If input was positive, scratch_high ASR 31 equals 0 and
  // scratch_high LSR 31 equals 0, so the result is unchanged.
  // If the input was negative, scratch_high ASR 31 equals 0xFFFFFFFF and
  // scratch_high LSR 31 equals 1, so the result is negated:
  //   (result ^ 0xFFFFFFFF) + 1 = 0 - result.
  __ srawi(r0, scratch_high, 31);
#if V8_TARGET_ARCH_PPC64
  __ srdi(r0, r0, Operand(32));
#endif
  __ xor_(result_reg, result_reg, r0);
  __ srwi(r0, scratch_high, Operand(31));
  __ add(result_reg, result_reg, r0);

  __ bind(&done);
  __ Pop(scratch_high, scratch_low);
  // Account for saved regs.
  argument_offset -= 2 * kPointerSize;

  __ bind(&fastpath_done);
  __ StoreP(result_reg, MemOperand(sp, argument_offset));
  __ Pop(result_reg, scratch);

  __ Ret();
}
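// The slow path above is a manual IEEE-754 decode: it splits the double into
// exponent and mantissa words, shifts the mantissa into the low 32 bits
// according to the unbiased exponent, and applies the sign with the
// two's-complement identity
//   negated = (x ^ (sign >> 31, arithmetic)) + (sign >> 31, logical)
// where "sign" is the high word of the input double.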
void Builtins::Generate_MathPowInternal(MacroAssembler* masm) {
  const Register exponent = r5;
  const DoubleRegister double_base = d1;
  const DoubleRegister double_exponent = d2;
  const DoubleRegister double_result = d3;
  const DoubleRegister double_scratch = d0;
  const Register scratch = r11;
  const Register scratch2 = r10;

  Label call_runtime, done, int_exponent;

  // Detect integer exponents stored as doubles.
  __ TryDoubleToInt32Exact(scratch, double_exponent, scratch2, double_scratch);
  __ beq(&int_exponent);

  __ mflr(r0);
  __ push(r0);
  {
    AllowExternalCallThatCantCauseGC scope(masm);
    __ PrepareCallCFunction(0, 2, scratch);
    __ MovToFloatParameters(double_base, double_exponent);
    __ CallCFunction(ExternalReference::power_double_double_function(), 0, 2);
  }
  __ pop(r0);
  __ mtlr(r0);
  __ MovFromFloatResult(double_result);
  __ b(&done);

  // Calculate power with integer exponent.
  __ bind(&int_exponent);

  // Get two copies of the exponent in scratch and exponent.
  // The exponent has previously been stored into scratch as an untagged
  // integer.
  __ mr(exponent, scratch);

  __ fmr(double_scratch, double_base);  // Back up base.
  __ li(scratch2, Operand(1));
  __ ConvertIntToDouble(scratch2, double_result);

  // Get absolute value of exponent.
  __ cmpi(scratch, Operand::Zero());
  if (CpuFeatures::IsSupported(ISELECT)) {
    __ neg(scratch2, scratch);
    __ isel(lt, scratch, scratch2, scratch);
  } else {
    Label positive_exponent;
    __ bge(&positive_exponent);
    __ neg(scratch, scratch);
    __ bind(&positive_exponent);
  }

  Label while_true, no_carry, loop_end;
  __ bind(&while_true);
  __ andi(scratch2, scratch, Operand(1));
  __ beq(&no_carry, cr0);
  __ fmul(double_result, double_result, double_scratch);
  __ bind(&no_carry);
  __ ShiftRightImm(scratch, scratch, Operand(1), SetRC);
  __ beq(&loop_end, cr0);
  __ fmul(double_scratch, double_scratch, double_scratch);
  __ b(&while_true);
  __ bind(&loop_end);

  __ cmpi(exponent, Operand::Zero());
  __ bge(&done);

  __ li(scratch2, Operand(1));
  __ ConvertIntToDouble(scratch2, double_scratch);
  __ fdiv(double_result, double_scratch, double_result);

  // Test whether result is zero. Bail out to check for subnormal result.
  // Due to subnormals, x^-y == (1/x)^y does not hold in all cases.
  __ fcmpu(double_result, kDoubleRegZero);
  __ bne(&done);
  // double_exponent may not contain the exponent value if the input was a
  // smi. We set it with the exponent value before bailing out.
  __ ConvertIntToDouble(exponent, double_exponent);

  // Returning or bailing out.
  __ mflr(r0);
  __ push(r0);
  {
    AllowExternalCallThatCantCauseGC scope(masm);
    __ PrepareCallCFunction(0, 2, scratch);
    __ MovToFloatParameters(double_base, double_exponent);
    __ CallCFunction(ExternalReference::power_double_double_function(), 0, 2);
  }
  __ pop(r0);
  __ mtlr(r0);
  __ MovFromFloatResult(double_result);

  __ bind(&done);
  __ Ret();
}
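// The integer-exponent loop above is exponentiation by squaring: one squaring
// per exponent bit, with a multiply into the result for every set bit. A
// plain C sketch of the same idea (illustrative only, not part of V8):
//
//   double pow_int(double base, int n) {
//     unsigned m = n < 0 ? 0u - (unsigned)n : (unsigned)n;
//     double result = 1.0;
//     for (; m != 0; m >>= 1) {   // examine exponent bits LSB-first
//       if (m & 1) result *= base;
//       base *= base;             // base^(2^k) for the next bit
//     }
//     return n < 0 ? 1.0 / result : result;
//   }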
namespace {

void GenerateInternalArrayConstructorCase(MacroAssembler* masm,
                                          ElementsKind kind) {
  // Load undefined into the allocation site parameter as required by
  // ArrayNArgumentsConstructor.
  __ LoadRoot(kJavaScriptCallExtraArg1Register, RootIndex::kUndefinedValue);

  __ cmpli(r3, Operand(1));

  __ Jump(CodeFactory::InternalArrayNoArgumentConstructor(masm->isolate(), kind)
              .code(),
          RelocInfo::CODE_TARGET, lt);

  __ Jump(BUILTIN_CODE(masm->isolate(), ArrayNArgumentsConstructor),
          RelocInfo::CODE_TARGET, gt);

  if (IsFastPackedElementsKind(kind)) {
    // We might need to create a holey array; look at the first argument.
    __ LoadP(r6, MemOperand(sp, 0));
    __ cmpi(r6, Operand::Zero());

    __ Jump(CodeFactory::InternalArraySingleArgumentConstructor(
                masm->isolate(), GetHoleyElementsKind(kind))
                .code(),
            RelocInfo::CODE_TARGET, ne);
  }

  __ Jump(
      CodeFactory::InternalArraySingleArgumentConstructor(masm->isolate(), kind)
          .code(),
      RelocInfo::CODE_TARGET);
}

}  // namespace

void Builtins::Generate_InternalArrayConstructorImpl(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r3 : argc
  //  -- r4 : constructor
  //  -- sp[0] : return address
  //  -- sp[4] : last argument
  // -----------------------------------

  if (FLAG_debug_code) {
    // The array construct code is only set for the global and natives
    // builtin Array functions which always have maps.

    // Initial map for the builtin Array function should be a map.
    __ LoadP(r6, FieldMemOperand(r4, JSFunction::kPrototypeOrInitialMapOffset));
    // Will both indicate a nullptr and a Smi.
    __ TestIfSmi(r6, r0);
    __ Assert(ne, AbortReason::kUnexpectedInitialMapForArrayFunction, cr0);
    __ CompareObjectType(r6, r6, r7, MAP_TYPE);
    __ Assert(eq, AbortReason::kUnexpectedInitialMapForArrayFunction);
  }

  // Figure out the right elements kind.
  __ LoadP(r6, FieldMemOperand(r4, JSFunction::kPrototypeOrInitialMapOffset));
  // Load the map's "bit field 2" into |result|.
  __ lbz(r6, FieldMemOperand(r6, Map::kBitField2Offset));
  // Retrieve elements_kind from bit field 2.
  __ DecodeField<Map::ElementsKindBits>(r6);

  if (FLAG_debug_code) {
    Label done;
    __ cmpi(r6, Operand(PACKED_ELEMENTS));
    __ beq(&done);
    __ cmpi(r6, Operand(HOLEY_ELEMENTS));
    __ Assert(
        eq,
        AbortReason::kInvalidElementsKindForInternalArrayOrInternalPackedArray);
    __ bind(&done);
  }

  Label fast_elements_case;
  __ cmpi(r6, Operand(PACKED_ELEMENTS));
  __ beq(&fast_elements_case);
  GenerateInternalArrayConstructorCase(masm, HOLEY_ELEMENTS);

  __ bind(&fast_elements_case);
  GenerateInternalArrayConstructorCase(masm, PACKED_ELEMENTS);
}

#undef __

}  // namespace internal
}  // namespace v8

#endif  // V8_TARGET_ARCH_PPC