// IA32-only builtin generators (guarded by V8_TARGET_ARCH_IA32).
// `__` routes assembler mnemonics through ACCESS_MASM(masm).
5 #if V8_TARGET_ARCH_IA32 7 #include "src/base/adapters.h" 8 #include "src/code-factory.h" 9 #include "src/counters.h" 10 #include "src/debug/debug.h" 11 #include "src/deoptimizer.h" 12 #include "src/frame-constants.h" 13 #include "src/frames.h" 14 #include "src/objects-inl.h" 15 #include "src/objects/js-generator.h" 16 #include "src/objects/smi.h" 17 #include "src/register-configuration.h" 18 #include "src/wasm/wasm-linkage.h" 19 #include "src/wasm/wasm-objects.h" 24 #define __ ACCESS_MASM(masm) 26 void Builtins::Generate_Adaptor(MacroAssembler* masm, Address address,
27 ExitFrameType exit_frame_type) {
// Pass the C++ builtin's entry point in the designated extra-arg
// register, then tail-call the adaptor that builds the requested kind
// of exit frame before entering the C++ code.
28 __ Move(kJavaScriptCallExtraArg1Register,
29 Immediate(ExternalReference::Create(address)));
30 if (exit_frame_type == BUILTIN_EXIT) {
31 __ Jump(BUILTIN_CODE(masm->isolate(), AdaptorWithBuiltinExitFrame),
32 RelocInfo::CODE_TARGET);
// Otherwise only EXIT remains as a legal exit frame type.
34 DCHECK(exit_frame_type == EXIT);
35 __ Jump(BUILTIN_CODE(masm->isolate(), AdaptorWithExitFrame),
36 RelocInfo::CODE_TARGET);
// Calls |function_id| (one argument) inside an internal frame and then
// tail-calls the Code object the runtime returned, entering it just
// past the Code header.
40 static void GenerateTailCallToReturnedCode(MacroAssembler* masm,
41 Runtime::FunctionId function_id) {
48 FrameScope scope(masm, StackFrame::INTERNAL);
58 __ CallRuntime(function_id, 1);
// The returned Code object must be in ecx, the JS call convention's
// code-start register; compute the instruction start from it.
68 static_assert(kJavaScriptCallCodeStartRegister == ecx,
"ABI mismatch");
69 __ lea(ecx, FieldOperand(ecx, Code::kHeaderSize));
// Shared construct-stub body for builtin constructors: sets up a
// CONSTRUCT frame, pushes the (hole) receiver and the arguments,
// invokes the constructor, and finally drops the caller's arguments.
75 void Generate_JSBuiltinsConstructStubHelper(MacroAssembler* masm) {
85 FrameScope scope(masm, StackFrame::CONSTRUCT);
// The hole stands in for the implicit receiver, which builtin
// constructors do not create.
94 __ PushRoot(RootIndex::kTheHoleValue);
97 __ lea(esi, Operand(ebp, StandardFrameConstants::kCallerSPOffset));
// Argument-copy loop: pushes caller arguments one 4-byte slot at a time.
114 __ push(Operand(esi, ecx, times_4, 0));
117 __ j(greater_equal, &loop);
123 ParameterCount actual(eax);
// Restore the context saved in the construct frame before the call.
125 __ mov(esi, Operand(ebp, ConstructFrameConstants::kContextOffset));
126 __ InvokeFunction(edi, edx, actual, CALL_FUNCTION);
129 __ mov(esi, Operand(ebp, ConstructFrameConstants::kContextOffset));
131 __ mov(edx, Operand(ebp, ConstructFrameConstants::kLengthOffset));
// Remove caller arguments from the stack: edx holds a Smi-tagged count,
// so scaling by times_2 (Smi tag size 1) yields bytes; the extra
// kPointerSize also drops the receiver slot.
136 STATIC_ASSERT(kSmiTagSize == 1 && kSmiTag == 0);
137 __ PopReturnAddressTo(ecx);
138 __ lea(esp, Operand(esp, edx, times_2, 1 * kPointerSize));
139 __ PushReturnAddressFrom(ecx);
// Jumps to |stack_overflow| when there is not enough stack headroom for
// |num_args| pointer-sized arguments (plus, optionally, the receiver)
// above the isolate's real stack limit. Clobbers |scratch|.
143 void Generate_StackOverflowCheck(MacroAssembler* masm, Register num_args,
144 Register scratch, Label* stack_overflow,
145 bool include_receiver =
false) {
149 ExternalReference real_stack_limit =
150 ExternalReference::address_of_real_stack_limit(masm->isolate());
// scratch <- limit - esp: headroom remains while this is negative.
153 __ mov(scratch, __ ExternalReferenceAsOperand(real_stack_limit, scratch))
154 __ sub(scratch, esp);
156 static_assert(kPointerSize == 4,
157 "The next instruction assumes kPointerSize == 4");
// Add the byte size of the arguments; a positive total means the push
// would cross the limit.
158 __ lea(scratch, Operand(scratch, num_args, times_4, 0));
159 if (include_receiver) {
160 __ add(scratch, Immediate(kPointerSize));
163 __ cmp(scratch, Immediate(0));
164 __ j(greater, stack_overflow);
// Generic [[Construct]] stub for JSFunctions: creates the implicit
// receiver via FastNewObject (unless the constructor is derived, which
// must allocate its own), pushes the arguments, invokes the
// constructor, and selects the correct construct result (explicit
// object return vs. the implicit receiver).
170 void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) {
181 FrameScope scope(masm, StackFrame::CONSTRUCT);
182 Label post_instantiation_deopt_entry, not_create_implicit_receiver;
190 __ PushRoot(RootIndex::kTheHoleValue);
// Derived constructors (per the SFI flag) skip implicit-receiver
// creation; base constructors fall through to FastNewObject.
201 __ mov(eax, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
202 __ test(FieldOperand(eax, SharedFunctionInfo::kFlagsOffset),
203 Immediate(SharedFunctionInfo::IsDerivedConstructorBit::kMask));
204 __ j(not_zero, ¬_create_implicit_receiver);
207 __ IncrementCounter(masm->isolate()->counters()->constructed_objects(), 1,
209 __ Call(BUILTIN_CODE(masm->isolate(), FastNewObject),
210 RelocInfo::CODE_TARGET);
211 __ jmp(&post_instantiation_deopt_entry, Label::kNear);
// Derived constructors use the hole as their "receiver" placeholder.
214 __ bind(¬_create_implicit_receiver);
215 __ LoadRoot(eax, RootIndex::kTheHoleValue);
// Deopt support: record the PC right after receiver instantiation so a
// deopt can resume here.
226 masm->isolate()->heap()->SetConstructStubCreateDeoptPCOffset(
228 __ bind(&post_instantiation_deopt_entry);
250 __ mov(eax, Operand(ebp, ConstructFrameConstants::kLengthOffset));
254 __ lea(edi, Operand(ebp, StandardFrameConstants::kCallerSPOffset));
// Abort early on stack overflow; this path does not return.
258 Label enough_stack_space, stack_overflow;
259 Generate_StackOverflowCheck(masm, eax, ecx, &stack_overflow);
260 __ jmp(&enough_stack_space);
262 __ bind(&stack_overflow);
264 __ mov(esi, Operand(ebp, ConstructFrameConstants::kContextOffset));
265 __ CallRuntime(Runtime::kThrowStackOverflow);
269 __ bind(&enough_stack_space);
// Copy the arguments into the new frame, then invoke the constructor.
286 __ jmp(&entry, Label::kNear);
288 __ Push(Operand(edi, ecx, times_pointer_size, 0));
291 __ j(greater_equal, &loop);
294 __ mov(edi, Operand(ebp, ConstructFrameConstants::kConstructorOffset));
295 ParameterCount actual(eax);
296 __ InvokeFunction(edi, edx, actual, CALL_FUNCTION);
// Deopt support: record the PC right after the constructor invocation.
308 masm->isolate()->heap()->SetConstructStubInvokeDeoptPCOffset(
312 __ mov(esi, Operand(ebp, ConstructFrameConstants::kContextOffset));
// Result selection: undefined -> use the (stored) implicit receiver;
// any JSReceiver -> use the explicit return value; other primitives
// fall back to the receiver (throwing for derived constructors whose
// stored "receiver" is still the hole).
317 Label use_receiver, do_throw, leave_frame;
320 __ JumpIfRoot(eax, RootIndex::kUndefinedValue, &use_receiver, Label::kNear);
326 __ JumpIfSmi(eax, &use_receiver, Label::kNear);
330 STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
331 __ CmpObjectType(eax, FIRST_JS_RECEIVER_TYPE, ecx);
332 __ j(above_equal, &leave_frame, Label::kNear);
333 __ jmp(&use_receiver, Label::kNear);
336 __ CallRuntime(Runtime::kThrowConstructorReturnedNonObject);
340 __ bind(&use_receiver);
341 __ mov(eax, Operand(esp, 0 * kPointerSize));
342 __ JumpIfRoot(eax, RootIndex::kTheHoleValue, &do_throw);
344 __ bind(&leave_frame);
346 __ mov(edx, Operand(ebp, ConstructFrameConstants::kLengthOffset));
// Drop caller arguments: Smi-tagged count scaled by times_2, plus one
// slot for the receiver (same pattern as the builtins construct stub).
350 STATIC_ASSERT(kSmiTagSize == 1 && kSmiTag == 0);
352 __ lea(esp, Operand(esp, edx, times_2, 1 * kPointerSize));
// Thin wrapper: builtin constructors share the helper defined above.
357 void Builtins::Generate_JSBuiltinsConstructStub(MacroAssembler* masm) {
358 Generate_JSBuiltinsConstructStubHelper(masm);
// Reached when [[Construct]] is attempted on a non-constructor: throws
// a TypeError via the runtime (does not return).
361 void Builtins::Generate_ConstructedNonConstructable(MacroAssembler* masm) {
362 FrameScope scope(masm, StackFrame::INTERNAL);
364 __ CallRuntime(Runtime::kThrowConstructedNonConstructable);
// Shared body of the JS entry trampolines: loads the current context,
// pulls function/receiver/argc/argv out of the entry frame, pushes the
// arguments (with a stack check first), and calls either Construct or
// Call depending on |is_construct|.
368 static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm,
371 FrameScope scope(masm, StackFrame::INTERNAL);
373 const Register scratch1 = edx;
374 const Register scratch2 = edi;
// Load the isolate's current context into esi.
377 ExternalReference context_address = ExternalReference::Create(
378 IsolateAddressId::kContextAddress, masm->isolate());
379 __ mov(esi, __ ExternalReferenceAsOperand(context_address, scratch1));
// scratch1 <- caller frame pointer, from which the entry-frame slots
// (function, receiver, argc, argv) are read.
382 __ mov(scratch1, Operand(ebp, 0));
385 __ push(Operand(scratch1, EntryFrameConstants::kFunctionArgOffset));
386 __ push(Operand(scratch1, EntryFrameConstants::kReceiverArgOffset));
389 __ mov(eax, Operand(scratch1, EntryFrameConstants::kArgcOffset));
390 __ mov(scratch1, Operand(scratch1, EntryFrameConstants::kArgvOffset));
// Bail out to the runtime before pushing if the arguments won't fit.
394 Label enough_stack_space, stack_overflow;
395 Generate_StackOverflowCheck(masm, eax, ecx, &stack_overflow);
396 __ jmp(&enough_stack_space);
398 __ bind(&stack_overflow);
399 __ CallRuntime(Runtime::kThrowStackOverflow);
403 __ bind(&enough_stack_space);
// Push arguments from the argv array; ecx counts up to argc.
407 __ Move(ecx, Immediate(0));
408 __ jmp(&entry, Label::kNear);
411 __ mov(scratch2, Operand(scratch1, ecx, times_4, 0));
412 __ push(Operand(scratch2, 0));
416 __ j(not_equal, &loop);
// Reload new.target and function; scratch registers were clobbered by
// the push loop.
419 __ mov(scratch2, Operand(ebp, 0));
422 __ mov(edx, Operand(scratch2, EntryFrameConstants::kNewTargetArgOffset));
423 __ mov(edi, Operand(scratch2, EntryFrameConstants::kFunctionArgOffset));
426 Handle<Code> builtin = is_construct
427 ? BUILTIN_CODE(masm->isolate(), Construct)
428 : masm->isolate()->builtins()->Call();
429 __ Call(builtin, RelocInfo::CODE_TARGET);
// Plain-call entry trampoline (is_construct == false).
438 void Builtins::Generate_JSEntryTrampoline(MacroAssembler* masm) {
439 Generate_JSEntryTrampolineHelper(masm,
false);
// Construct entry trampoline (is_construct == true).
442 void Builtins::Generate_JSConstructEntryTrampoline(MacroAssembler* masm) {
443 Generate_JSEntryTrampolineHelper(masm,
true);
// If |sfi_data| holds an InterpreterData, replaces it with the embedded
// BytecodeArray; any other object type is left untouched.
446 static void GetSharedFunctionInfoBytecode(MacroAssembler* masm,
451 __ CmpObjectType(sfi_data, INTERPRETER_DATA_TYPE, scratch1);
452 __ j(not_equal, &done, Label::kNear);
454 FieldOperand(sfi_data, InterpreterData::kBytecodeArrayOffset));
// Resumes a suspended JSGeneratorObject (in edx): records the resume
// value, handles debugger stepping hooks, pushes the parameters saved
// in the generator's register file, and jumps into the generator
// function's code.
460 void Builtins::Generate_ResumeGeneratorTrampoline(MacroAssembler* masm) {
466 __ AssertGeneratorObject(edx);
// Store the input value (eax) into the generator and emit the write
// barrier for the field update.
469 __ mov(FieldOperand(edx, JSGeneratorObject::kInputOrDebugPosOffset), eax);
470 __ RecordWriteField(edx, JSGeneratorObject::kInputOrDebugPosOffset, eax, ecx,
474 __ mov(edi, FieldOperand(edx, JSGeneratorObject::kFunctionOffset));
475 __ mov(esi, FieldOperand(edi, JSFunction::kContextOffset));
// Debugger integration: divert to the prepare-step paths when the
// function-call hook is armed or this generator is being stepped into.
478 Label prepare_step_in_if_stepping, prepare_step_in_suspended_generator;
479 Label stepping_prepared;
480 ExternalReference debug_hook =
481 ExternalReference::debug_hook_on_function_call_address(masm->isolate());
482 __ cmpb(__ ExternalReferenceAsOperand(debug_hook, ecx), Immediate(0));
483 __ j(not_equal, &prepare_step_in_if_stepping);
486 ExternalReference debug_suspended_generator =
487 ExternalReference::debug_suspended_generator_address(masm->isolate());
488 __ cmp(edx, __ ExternalReferenceAsOperand(debug_suspended_generator, ecx));
489 __ j(equal, &prepare_step_in_suspended_generator);
490 __ bind(&stepping_prepared);
// Flooring stack check before pushing the saved parameters.
494 Label stack_overflow;
495 __ CompareRealStackLimit(esp);
496 __ j(below, &stack_overflow);
499 __ PopReturnAddressTo(eax);
502 __ Push(FieldOperand(edx, JSGeneratorObject::kReceiverOffset));
// Copy the formal parameters back out of the generator's
// parameters-and-registers FixedArray onto the stack.
516 __ mov(ecx, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
517 __ movzx_w(ecx, FieldOperand(
518 ecx, SharedFunctionInfo::kFormalParameterCountOffset));
520 FieldOperand(edx, JSGeneratorObject::kParametersAndRegistersOffset));
522 Label done_loop, loop;
527 __ j(greater_equal, &done_loop);
529 FieldOperand(ebx, edi, times_pointer_size, FixedArray::kHeaderSize));
530 __ add(edi, Immediate(1));
// Reload the function; edi was used as the loop index above.
537 __ mov(edi, FieldOperand(edx, JSGeneratorObject::kFunctionOffset));
// Debug-only sanity check: the function must have bytecode to resume.
542 if (FLAG_debug_code) {
543 __ mov(ecx, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
544 __ mov(ecx, FieldOperand(ecx, SharedFunctionInfo::kFunctionDataOffset));
546 GetSharedFunctionInfoBytecode(masm, ecx, eax);
548 __ CmpObjectType(ecx, BYTECODE_ARRAY_TYPE, ecx);
549 __ Assert(equal, AbortReason::kMissingBytecodeArray);
// Set up eax (argc) and jump to the function's code past the header.
554 __ PushReturnAddressFrom(eax);
555 __ mov(eax, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
556 __ movzx_w(eax, FieldOperand(
557 eax, SharedFunctionInfo::kFormalParameterCountOffset));
561 static_assert(kJavaScriptCallCodeStartRegister == ecx,
"ABI mismatch");
562 __ mov(ecx, FieldOperand(edi, JSFunction::kCodeOffset));
563 __ add(ecx, Immediate(Code::kHeaderSize - kHeapObjectTag));
// Slow paths: notify the debugger, then retry from stepping_prepared.
567 __ bind(&prepare_step_in_if_stepping);
569 FrameScope scope(masm, StackFrame::INTERNAL);
573 __ PushRoot(RootIndex::kTheHoleValue);
574 __ CallRuntime(Runtime::kDebugOnFunctionCall);
576 __ mov(edi, FieldOperand(edx, JSGeneratorObject::kFunctionOffset));
578 __ jmp(&stepping_prepared);
580 __ bind(&prepare_step_in_suspended_generator);
582 FrameScope scope(masm, StackFrame::INTERNAL);
584 __ CallRuntime(Runtime::kDebugPrepareStepInSuspendedGenerator);
586 __ mov(edi, FieldOperand(edx, JSGeneratorObject::kFunctionOffset));
588 __ jmp(&stepping_prepared);
590 __ bind(&stack_overflow);
592 FrameScope scope(masm, StackFrame::INTERNAL);
593 __ CallRuntime(Runtime::kThrowStackOverflow);
// Installs |optimized_code| as the closure's code and emits the write
// barrier for the field store (scratch1 holds the barrier's value copy).
598 static void ReplaceClosureCodeWithOptimizedCode(MacroAssembler* masm,
599 Register optimized_code,
604 __ mov(FieldOperand(closure, JSFunction::kCodeOffset), optimized_code);
605 __ mov(scratch1, optimized_code);
606 __ RecordWriteField(closure, JSFunction::kCodeOffset, scratch1, scratch2,
607 kDontSaveFPRegs, OMIT_REMEMBERED_SET, OMIT_SMI_CHECK);
// Tears down the interpreter frame and drops the caller arguments,
// whose byte size is read from the BytecodeArray's parameter size.
610 static void LeaveInterpreterFrame(MacroAssembler* masm, Register scratch1,
612 Register args_count = scratch1;
613 Register return_pc = scratch2;
617 Operand(ebp, InterpreterFrameConstants::kBytecodeArrayFromFp));
619 FieldOperand(args_count, BytecodeArray::kParameterSizeOffset));
626 __ add(esp, args_count);
// If the Smi in |smi_entry| equals |marker|, tail-calls |function_id|
// via GenerateTailCallToReturnedCode; otherwise falls through.
631 static void TailCallRuntimeIfMarkerEquals(MacroAssembler* masm,
633 OptimizationMarker marker,
634 Runtime::FunctionId function_id) {
636 __ cmp(smi_entry, Immediate(Smi::FromEnum(marker)));
637 __ j(not_equal, &no_match, Label::kNear);
638 GenerateTailCallToReturnedCode(masm, function_id);
// Inspects the feedback vector's optimized-code slot. A Smi there is an
// optimization marker (dispatch to the matching runtime call); a weak
// reference is optimized Code (install it on the closure and tail-call
// it, unless it was marked for deoptimization). Falls through when
// there is nothing to do.
642 static void MaybeTailCallOptimizedCodeSlot(MacroAssembler* masm,
650 DCHECK(!AreAliased(eax, edx, edi, scratch));
652 Label optimized_code_slot_is_weak_ref, fallthrough;
654 Register closure = edi;
656 Register feedback_vector = scratch;
// NOTE: optimized_code_entry aliases feedback_vector (both |scratch|);
// the vector is no longer needed once the slot is loaded.
659 Register optimized_code_entry = scratch;
660 __ mov(optimized_code_entry,
661 FieldOperand(feedback_vector, FeedbackVector::kOptimizedCodeOffset));
666 __ JumpIfNotSmi(optimized_code_entry, &optimized_code_slot_is_weak_ref);
// Smi case: a kNone marker means nothing is pending.
672 __ cmp(optimized_code_entry,
673 Immediate(Smi::FromEnum(OptimizationMarker::kNone)));
674 __ j(equal, &fallthrough);
// Each helper tail-calls the runtime only when its marker matches.
679 TailCallRuntimeIfMarkerEquals(masm, optimized_code_entry,
680 OptimizationMarker::kLogFirstExecution,
681 Runtime::kFunctionFirstExecution);
682 TailCallRuntimeIfMarkerEquals(masm, optimized_code_entry,
683 OptimizationMarker::kCompileOptimized,
684 Runtime::kCompileOptimized_NotConcurrent);
685 TailCallRuntimeIfMarkerEquals(
686 masm, optimized_code_entry,
687 OptimizationMarker::kCompileOptimizedConcurrent,
688 Runtime::kCompileOptimized_Concurrent);
// The only remaining legal marker is kInOptimizationQueue.
693 if (FLAG_debug_code) {
695 optimized_code_entry,
696 Immediate(Smi::FromEnum(OptimizationMarker::kInOptimizationQueue)));
697 __ Assert(equal, AbortReason::kExpectedOptimizationSentinel);
699 __ jmp(&fallthrough);
// Weak-ref case: the slot holds (possibly cleared) optimized Code.
705 __ bind(&optimized_code_slot_is_weak_ref);
707 __ LoadWeakValue(optimized_code_entry, &fallthrough);
// Evict code that has been marked for deoptimization instead of
// entering it.
714 Label found_deoptimized_code;
716 FieldOperand(optimized_code_entry, Code::kCodeDataContainerOffset));
717 __ test(FieldOperand(eax, CodeDataContainer::kKindSpecificFlagsOffset),
718 Immediate(1 << Code::kMarkedForDeoptimizationBit));
719 __ j(not_zero, &found_deoptimized_code);
// Healthy optimized code: install it on the closure and jump to it.
723 ReplaceClosureCodeWithOptimizedCode(masm, optimized_code_entry, closure,
725 static_assert(kJavaScriptCallCodeStartRegister == ecx,
"ABI mismatch");
726 __ Move(ecx, optimized_code_entry);
727 __ add(ecx, Immediate(Code::kHeaderSize - kHeapObjectTag));
734 __ bind(&found_deoptimized_code);
737 GenerateTailCallToReturnedCode(masm, Runtime::kEvictOptimizedCodeSlot);
742 __ bind(&fallthrough);
// Advances |bytecode_offset| past the current bytecode, transparently
// handling Wide/ExtraWide prefixes by skipping the prefix byte and
// selecting the corresponding section of the bytecode size table.
// Jumps to |if_return| instead when the bytecode is a return.
748 static void AdvanceBytecodeOffsetOrReturn(MacroAssembler* masm,
749 Register bytecode_array,
750 Register bytecode_offset,
751 Register scratch1, Register scratch2,
753 Register bytecode_size_table = scratch1;
754 Register bytecode = scratch2;
755 DCHECK(!AreAliased(bytecode_array, bytecode_offset, bytecode_size_table,
757 __ Move(bytecode_size_table,
758 Immediate(ExternalReference::bytecode_size_table_address()));
// Load the current bytecode byte.
761 __ movzx_b(bytecode, Operand(kInterpreterBytecodeArrayRegister,
762 kInterpreterBytecodeOffsetRegister, times_1, 0));
// Prefix handling relies on the scale-prefix bytecodes occupying the
// first four opcode values (asserted below).
765 Label process_bytecode, extra_wide;
766 STATIC_ASSERT(0 == static_cast<int>(interpreter::Bytecode::kWide));
767 STATIC_ASSERT(1 == static_cast<int>(interpreter::Bytecode::kExtraWide));
768 STATIC_ASSERT(2 == static_cast<int>(interpreter::Bytecode::kDebugBreakWide));
770 static_cast<int>(interpreter::Bytecode::kDebugBreakExtraWide));
771 __ cmp(bytecode, Immediate(0x3));
772 __ j(above, &process_bytecode, Label::kNear);
// Odd prefix values (1, 3) are the ExtraWide variants.
773 __ test(bytecode, Immediate(0x1));
774 __ j(not_equal, &extra_wide, Label::kNear);
// Wide: skip the prefix and use the second size-table section.
777 __ inc(bytecode_offset);
778 __ movzx_b(bytecode, Operand(bytecode_array, bytecode_offset, times_1, 0));
779 __ add(bytecode_size_table,
780 Immediate(kIntSize * interpreter::Bytecodes::kBytecodeCount));
781 __ jmp(&process_bytecode, Label::kNear);
// ExtraWide: skip the prefix and use the third size-table section.
783 __ bind(&extra_wide);
785 __ inc(bytecode_offset);
786 __ movzx_b(bytecode, Operand(bytecode_array, bytecode_offset, times_1, 0));
787 __ add(bytecode_size_table,
788 Immediate(2 * kIntSize * interpreter::Bytecodes::kBytecodeCount));
// Return bytecodes divert to |if_return|; everything else advances by
// the table-resolved size of the (possibly widened) bytecode.
790 __ bind(&process_bytecode);
793 #define JUMP_IF_EQUAL(NAME) \ 795 Immediate(static_cast<int>(interpreter::Bytecode::k##NAME))); \ 796 __ j(equal, if_return); 797 RETURN_BYTECODE_LIST(JUMP_IF_EQUAL)
801 __ add(bytecode_offset, Operand(bytecode_size_table, bytecode, times_4, 0));
// Entry point for interpreted functions: optionally tail-calls pending
// optimized code via the feedback vector, otherwise builds an
// interpreter frame, allocates and zaps the register file, and enters
// the dispatch loop on the function's BytecodeArray.
818 void Builtins::Generate_InterpreterEntryTrampoline(MacroAssembler* masm) {
819 Register closure = edi;
821 Register feedback_vector = ecx;
822 Label push_stack_frame;
// Load the feedback vector (closure -> feedback cell -> value); an
// undefined vector means no feedback yet, so skip the optimized-code
// check entirely.
826 __ mov(feedback_vector,
827 FieldOperand(closure, JSFunction::kFeedbackCellOffset));
828 __ mov(feedback_vector, FieldOperand(feedback_vector, Cell::kValueOffset));
829 __ JumpIfRoot(feedback_vector, RootIndex::kUndefinedValue, &push_stack_frame);
833 MaybeTailCallOptimizedCodeSlot(masm, ecx);
// Reload the vector (clobbered above) and bump the invocation count.
836 __ mov(feedback_vector,
837 FieldOperand(closure, JSFunction::kFeedbackCellOffset));
838 __ mov(feedback_vector, FieldOperand(feedback_vector, Cell::kValueOffset));
839 __ inc(FieldOperand(feedback_vector, FeedbackVector::kInvocationCountOffset));
844 __ bind(&push_stack_frame);
845 FrameScope frame_scope(masm, StackFrame::MANUAL);
// Fetch the BytecodeArray from the SharedFunctionInfo, unwrapping
// InterpreterData if necessary.
853 __ mov(eax, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
854 __ mov(kInterpreterBytecodeArrayRegister,
855 FieldOperand(eax, SharedFunctionInfo::kFunctionDataOffset));
857 GetSharedFunctionInfoBytecode(masm, kInterpreterBytecodeArrayRegister, eax);
861 if (FLAG_debug_code) {
862 __ AssertNotSmi(kInterpreterBytecodeArrayRegister);
863 __ CmpObjectType(kInterpreterBytecodeArrayRegister, BYTECODE_ARRAY_TYPE,
867 AbortReason::kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry);
// Reset bytecode age: this function is clearly live.
871 __ mov_b(FieldOperand(kInterpreterBytecodeArrayRegister,
872 BytecodeArray::kBytecodeAgeOffset),
873 Immediate(BytecodeArray::kNoAgeBytecodeAge));
876 __ push(kInterpreterBytecodeArrayRegister);
878 __ push(Immediate(Smi::FromInt(BytecodeArray::kHeaderSize - kHeapObjectTag)));
// Allocate the interpreter register file, after checking that the
// frame size fits under the real stack limit.
883 Register frame_size = ecx;
884 __ mov(frame_size, FieldOperand(kInterpreterBytecodeArrayRegister,
885 BytecodeArray::kFrameSizeOffset));
890 __ sub(eax, frame_size);
891 __ CompareRealStackLimit(eax);
892 __ j(above_equal, &ok);
893 __ CallRuntime(Runtime::kThrowStackOverflow);
// Fill the register file with undefined.
899 __ Move(eax, masm->isolate()->factory()->undefined_value());
901 __ bind(&loop_header);
905 __ bind(&loop_check);
906 __ sub(frame_size, Immediate(kPointerSize));
907 __ j(greater_equal, &loop_header);
// If the bytecode declares an incoming new.target/generator register,
// store edx (new.target) into that frame slot.
912 Label no_incoming_new_target_or_generator_register;
913 __ mov(eax, FieldOperand(
914 kInterpreterBytecodeArrayRegister,
915 BytecodeArray::kIncomingNewTargetOrGeneratorRegisterOffset));
917 __ j(zero, &no_incoming_new_target_or_generator_register);
918 __ mov(Operand(ebp, eax, times_pointer_size, 0), edx);
919 __ bind(&no_incoming_new_target_or_generator_register);
// Initialize the accumulator and the bytecode offset (first bytecode
// starts right after the BytecodeArray header).
922 __ LoadRoot(kInterpreterAccumulatorRegister, RootIndex::kUndefinedValue);
923 __ mov(kInterpreterBytecodeOffsetRegister,
924 Immediate(BytecodeArray::kHeaderSize - kHeapObjectTag));
// Dispatch: index the dispatch table with the current bytecode byte
// and call the handler; the recorded PC offset lets returns from
// handlers re-enter here.
929 __ bind(&do_dispatch);
930 __ Move(kInterpreterDispatchTableRegister,
931 Immediate(ExternalReference::interpreter_dispatch_table_address(
933 __ movzx_b(ecx, Operand(kInterpreterBytecodeArrayRegister,
934 kInterpreterBytecodeOffsetRegister, times_1, 0));
936 kJavaScriptCallCodeStartRegister,
937 Operand(kInterpreterDispatchTableRegister, ecx, times_pointer_size, 0));
938 __ call(kJavaScriptCallCodeStartRegister);
939 masm->isolate()->heap()->SetInterpreterEntryReturnPCOffset(masm->pc_offset());
// After a handler returns, reload array/offset from the frame (the
// registers are not preserved across the call).
945 __ mov(kInterpreterBytecodeArrayRegister,
946 Operand(ebp, InterpreterFrameConstants::kBytecodeArrayFromFp));
947 __ mov(kInterpreterBytecodeOffsetRegister,
948 Operand(ebp, InterpreterFrameConstants::kBytecodeOffsetFromFp));
949 __ SmiUntag(kInterpreterBytecodeOffsetRegister);
// Advance to the next bytecode, or fall out of the loop on a return.
953 AdvanceBytecodeOffsetOrReturn(masm, kInterpreterBytecodeArrayRegister,
954 kInterpreterBytecodeOffsetRegister, ecx,
955 kInterpreterDispatchTableRegister, &do_return);
956 __ jmp(&do_dispatch);
960 LeaveInterpreterFrame(masm, edx, ecx);
// Pushes arguments from |start_address| down to (but excluding)
// |array_limit| — the args array is laid out with the last argument at
// the highest address, so iteration walks downward in memory.
965 static void Generate_InterpreterPushArgs(MacroAssembler* masm,
966 Register array_limit,
967 Register start_address) {
973 Label loop_header, loop_check;
975 __ bind(&loop_header);
976 __ Push(Operand(start_address, 0));
977 __ sub(start_address, Immediate(kPointerSize));
978 __ bind(&loop_check);
979 __ cmp(start_address, array_limit);
980 __ j(greater, &loop_header, Label::kNear);
// Interpreter push-args-then-call: checks stack space, pushes the
// receiver (or undefined) and arguments from the interpreter's args
// array, then tail-calls Call or CallWithSpread depending on |mode|.
984 void Builtins::Generate_InterpreterPushArgsThenCallImpl(
985 MacroAssembler* masm, ConvertReceiverMode receiver_mode,
986 InterpreterPushArgsMode mode) {
// The Array-function variant is handled by the construct path instead.
987 DCHECK(mode != InterpreterPushArgsMode::kArrayFunction);
996 const Register scratch = edx;
997 const Register argv = ecx;
999 Label stack_overflow;
// include_receiver=true: the receiver slot counts toward the check.
1001 Generate_StackOverflowCheck(masm, eax, scratch, &stack_overflow,
true);
// scratch <- argc + 1 (the receiver).
1006 __ mov(scratch, eax);
1007 __ add(scratch, Immediate(1));
1010 __ PopReturnAddressTo(eax);
// With a null/undefined receiver mode the receiver is pushed here and
// excluded from the copy loop below.
1013 if (receiver_mode == ConvertReceiverMode::kNullOrUndefined) {
1014 __ PushRoot(RootIndex::kUndefinedValue);
1015 __ sub(scratch, Immediate(1));
// Compute the push limit (argv + count * kPointerSize) and copy.
1019 __ shl(scratch, kPointerSizeLog2);
1021 __ add(scratch, argv);
1022 Generate_InterpreterPushArgs(masm, scratch, argv);
// Spread calls pop the spread off again (handled by the builtin) and
// exclude it from the argument count.
1026 if (mode == InterpreterPushArgsMode::kWithFinalSpread) {
1028 __ PushReturnAddressFrom(eax);
1030 __ sub(eax, Immediate(1));
1031 __ Jump(BUILTIN_CODE(masm->isolate(), CallWithSpread),
1032 RelocInfo::CODE_TARGET);
1034 __ PushReturnAddressFrom(eax);
1036 __ Jump(masm->isolate()->builtins()->Call(ConvertReceiverMode::kAny),
1037 RelocInfo::CODE_TARGET);
// Unreachable fall-through: the throw does not return.
1040 __ bind(&stack_overflow);
1042 __ TailCallRuntime(Runtime::kThrowStackOverflow);
// Builds the stack layout for push-args-then-construct: reserves a new
// frame region, relocates the return address (plus
// |num_slots_to_move| extra slots) below the arguments, pushes a zero
// receiver slot, and copies |num_args| arguments into place.
1053 void Generate_InterpreterPushZeroAndArgsAndReturnAddress(
1054 MacroAssembler* masm, Register num_args, Register start_addr,
1055 Register scratch1, Register scratch2,
int num_slots_to_move,
1056 Label* stack_overflow) {
1072 Generate_StackOverflowCheck(masm, num_args, scratch1, stack_overflow,
true);
// Reserve num_args + 1 (receiver) slots of stack.
1076 __ lea(scratch1, Operand(num_args, times_4, kPointerSize));
1077 __ AllocateStackFrame(scratch1);
// Slide the return address and the extra stack-argument slots down to
// the bottom of the newly reserved region.
1083 for (
int i = 0;
i < num_slots_to_move + 1;
i++) {
1085 Operand(esp, num_args, times_pointer_size, (
i + 1) * kPointerSize));
1086 __ mov(Operand(esp,
i * kPointerSize), scratch1);
// Zero out the receiver slot just above the moved slots.
1092 __ mov(Operand(esp, num_args, times_pointer_size,
1093 (num_slots_to_move + 1) * kPointerSize),
1095 __ mov(scratch1, num_args);
// Copy the arguments, highest index first, from the interpreter's
// args area into the prepared slots.
1097 Label loop_header, loop_check;
1098 __ jmp(&loop_check);
1099 __ bind(&loop_header);
1100 __ mov(scratch2, Operand(start_addr, 0));
1101 __ mov(Operand(esp, scratch1, times_pointer_size,
1102 num_slots_to_move * kPointerSize),
1104 __ sub(start_addr, Immediate(kPointerSize));
1105 __ sub(scratch1, Immediate(1));
1106 __ bind(&loop_check);
1107 __ cmp(scratch1, Immediate(0));
1108 __ j(greater, &loop_header, Label::kNear);
// Interpreter push-args-then-construct: lays out the stack via the
// helper above, then pops target/new.target (and, for the Array
// function, the allocation-site extra arg) back into the JS call
// registers before tail-calling the matching Construct builtin.
1114 void Builtins::Generate_InterpreterPushArgsThenConstructImpl(
1115 MacroAssembler* masm, InterpreterPushArgsMode mode) {
1127 Label stack_overflow;
1132 Generate_InterpreterPushZeroAndArgsAndReturnAddress(
1133 masm, eax, ecx, edx, edi,
1134 InterpreterPushArgsThenConstructDescriptor::kStackArgumentsCount,
// Array-function path: ArrayConstructorImpl also consumes the
// allocation-site extra argument.
1140 if (mode == InterpreterPushArgsMode::kArrayFunction) {
1145 __ PopReturnAddressTo(eax);
1146 __ Pop(kJavaScriptCallExtraArg1Register);
1147 __ Pop(kJavaScriptCallNewTargetRegister);
1148 __ Pop(kJavaScriptCallTargetRegister);
1149 __ PushReturnAddressFrom(eax);
1151 __ AssertFunction(kJavaScriptCallTargetRegister);
1152 __ AssertUndefinedOrAllocationSite(kJavaScriptCallExtraArg1Register, eax);
1155 __ Jump(BUILTIN_CODE(masm->isolate(), ArrayConstructorImpl),
1156 RelocInfo::CODE_TARGET);
1157 }
// Spread path: the final spread argument is excluded from argc.
else if (mode == InterpreterPushArgsMode::kWithFinalSpread) {
1159 __ PopReturnAddressTo(eax);
1161 __ Pop(kJavaScriptCallNewTargetRegister);
1162 __ Pop(kJavaScriptCallTargetRegister);
1164 __ PushReturnAddressFrom(eax);
1166 __ sub(eax, Immediate(1));
1168 __ Jump(BUILTIN_CODE(masm->isolate(), ConstructWithSpread),
1169 RelocInfo::CODE_TARGET);
// Plain construct path.
1171 DCHECK_EQ(InterpreterPushArgsMode::kOther, mode);
1172 __ PopReturnAddressTo(ecx);
1174 __ Pop(kJavaScriptCallNewTargetRegister);
1175 __ Pop(kJavaScriptCallTargetRegister);
1176 __ PushReturnAddressFrom(ecx);
1178 __ Jump(BUILTIN_CODE(masm->isolate(), Construct), RelocInfo::CODE_TARGET);
1181 __ bind(&stack_overflow);
1182 __ TailCallRuntime(Runtime::kThrowStackOverflow);
// Re-enters the interpreter at the frame's current bytecode offset:
// patches the return address to point into the interpreter entry
// trampoline (custom InterpreterData trampoline when present), reloads
// array/offset from the frame, and jumps to the bytecode's handler.
1186 static void Generate_InterpreterEnterBytecode(MacroAssembler* masm) {
1189 Label builtin_trampoline, trampoline_loaded;
1190 Smi interpreter_entry_return_pc_offset(
1191 masm->isolate()->heap()->interpreter_entry_return_pc_offset());
// Must have been recorded by Generate_InterpreterEntryTrampoline.
1192 DCHECK_NE(interpreter_entry_return_pc_offset, Smi::kZero);
1194 static constexpr Register scratch = ecx;
// Prefer a per-function trampoline if the SFI holds InterpreterData.
1200 __ mov(scratch, Operand(ebp, StandardFrameConstants::kFunctionOffset));
1201 __ mov(scratch, FieldOperand(scratch, JSFunction::kSharedFunctionInfoOffset));
1203 FieldOperand(scratch, SharedFunctionInfo::kFunctionDataOffset));
1205 __ CmpObjectType(scratch, INTERPRETER_DATA_TYPE, eax);
1206 __ j(not_equal, &builtin_trampoline, Label::kNear);
1209 FieldOperand(scratch, InterpreterData::kInterpreterTrampolineOffset));
1210 __ add(scratch, Immediate(Code::kHeaderSize - kHeapObjectTag));
1211 __ jmp(&trampoline_loaded, Label::kNear);
// Default: the shared interpreter entry trampoline's address.
1213 __ bind(&builtin_trampoline);
1215 __ ExternalReferenceAsOperand(
1217 address_of_interpreter_entry_trampoline_instruction_start(
// Offset to the trampoline's post-call resume point.
1221 __ bind(&trampoline_loaded);
1223 __ add(scratch, Immediate(interpreter_entry_return_pc_offset->value()));
1227 __ Move(kInterpreterDispatchTableRegister,
1228 Immediate(ExternalReference::interpreter_dispatch_table_address(
// Reload the interpreter state from the current frame.
1232 __ mov(kInterpreterBytecodeArrayRegister,
1233 Operand(ebp, InterpreterFrameConstants::kBytecodeArrayFromFp));
1235 if (FLAG_debug_code) {
1237 __ AssertNotSmi(kInterpreterBytecodeArrayRegister);
1238 __ CmpObjectType(kInterpreterBytecodeArrayRegister, BYTECODE_ARRAY_TYPE,
1242 AbortReason::kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry);
1246 __ mov(kInterpreterBytecodeOffsetRegister,
1247 Operand(ebp, InterpreterFrameConstants::kBytecodeOffsetFromFp));
1248 __ SmiUntag(kInterpreterBytecodeOffsetRegister);
// Dispatch to the handler for the current bytecode.
1251 __ movzx_b(scratch, Operand(kInterpreterBytecodeArrayRegister,
1252 kInterpreterBytecodeOffsetRegister, times_1, 0));
1253 __ mov(kJavaScriptCallCodeStartRegister,
1254 Operand(kInterpreterDispatchTableRegister, scratch, times_pointer_size,
1256 __ jmp(kJavaScriptCallCodeStartRegister);
// Advances the frame's saved bytecode offset past the current bytecode
// and re-enters the dispatch loop; aborts if the current bytecode is a
// return (advancing past a return is invalid).
1259 void Builtins::Generate_InterpreterEnterBytecodeAdvance(MacroAssembler* masm) {
1261 __ mov(kInterpreterBytecodeArrayRegister,
1262 Operand(ebp, InterpreterFrameConstants::kBytecodeArrayFromFp));
1263 __ mov(kInterpreterBytecodeOffsetRegister,
1264 Operand(ebp, InterpreterFrameConstants::kBytecodeOffsetFromFp));
1265 __ SmiUntag(kInterpreterBytecodeOffsetRegister);
1269 AdvanceBytecodeOffsetOrReturn(masm, kInterpreterBytecodeArrayRegister,
1270 kInterpreterBytecodeOffsetRegister, ecx, esi,
// Write the advanced offset back into the frame before re-entry.
1274 __ mov(ecx, kInterpreterBytecodeOffsetRegister);
1276 __ mov(Operand(ebp, InterpreterFrameConstants::kBytecodeOffsetFromFp), ecx);
1278 Generate_InterpreterEnterBytecode(masm);
1281 __ bind(&if_return);
1282 __ Abort(AbortReason::kInvalidBytecodeAdvance);
// Re-enters dispatch at the current (unchanged) bytecode offset.
1285 void Builtins::Generate_InterpreterEnterBytecodeDispatch(MacroAssembler* masm) {
1286 Generate_InterpreterEnterBytecode(masm);
// Tries to instantiate an asm.js module via Runtime::kInstantiateAsmJs
// (which takes exactly 4 args, padding missing ones with undefined).
// On success returns the instantiated result after dropping the
// caller's arguments; on failure (Smi result) falls through to run the
// function's regular code.
1289 void Builtins::Generate_InstantiateAsmJs(MacroAssembler* masm) {
1297 FrameScope scope(masm, StackFrame::INTERNAL);
// For each possible argc (0..3): push the provided args from the
// caller frame, then pad with undefined up to 3.
1311 for (
int j = 0; j < 4; ++j) {
1314 __ cmp(ecx, Immediate(j));
1315 __ j(not_equal, &over, Label::kNear);
1317 for (
int i = j - 1;
i >= 0; --
i) {
1319 ebp, StandardFrameConstants::kCallerSPOffset +
i * kPointerSize));
1321 for (
int i = 0;
i < 3 - j; ++
i) {
1322 __ PushRoot(RootIndex::kUndefinedValue);
1325 __ jmp(&args_done, Label::kNear);
1329 __ bind(&args_done);
1332 __ CallRuntime(Runtime::kInstantiateAsmJs, 4);
// A Smi result signals instantiation failure.
1334 __ JumpIfSmi(eax, &failed, Label::kNear);
// Success: leave the frame and drop the caller's arguments.
1339 scope.GenerateLeaveFrame();
1341 __ PopReturnAddressTo(edx);
1343 __ lea(esp, Operand(esp, ecx, times_pointer_size, 0));
1344 __ PushReturnAddressFrom(edx);
// Failure path: tail-call the function's regular code instead.
1356 static_assert(kJavaScriptCallCodeStartRegister == ecx,
"ABI mismatch");
1357 __ mov(ecx, FieldOperand(edi, JSFunction::kCodeOffset));
1358 __ add(ecx, Immediate(Code::kHeaderSize - kHeapObjectTag));
// Deopt continuation: restores all allocatable general registers from
// the builtin-continuation frame (un-Smi-tagging argc for JS builtins),
// then resumes at the continuation Code object left on the stack.
1363 void Generate_ContinueToBuiltinHelper(MacroAssembler* masm,
bool java_script_builtin,
1366 const RegisterConfiguration* config(RegisterConfiguration::Default());
1367 int allocatable_register_count = config->num_allocatable_general_registers();
1372 config->num_allocatable_general_registers() * kPointerSize +
1373 BuiltinContinuationFrameConstants::kFixedFrameSize),
// Pop registers in reverse allocation order (they were pushed in
// allocation order when the frame was built).
1376 for (
int i = allocatable_register_count - 1;
i >= 0; --
i) {
1377 int code = config->GetAllocatableGeneralCode(
i);
1378 __ pop(Register::from_code(code));
// argc was saved Smi-tagged; restore the raw integer for JS builtins.
1379 if (java_script_builtin && code == kJavaScriptCallArgCountRegister.code()) {
1380 __ SmiUntag(Register::from_code(code));
1385 Operand(esp, BuiltinContinuationFrameConstants::kFixedFrameSizeFromFp));
// Overwrite the frame's saved PC slot with the continuation Code
// pointer, drop the rest of the fixed frame, then "return" into the
// code past its header.
1386 const int offsetToPC =
1387 BuiltinContinuationFrameConstants::kFixedFrameSizeFromFp - kPointerSize;
1388 __ pop(Operand(esp, offsetToPC));
1389 __ Drop(offsetToPC / kPointerSize);
1390 __ add(Operand(esp, 0), Immediate(Code::kHeaderSize - kHeapObjectTag));
// Continuation into a code-stub builtin: not a JS builtin, no result register.
1395 void Builtins::Generate_ContinueToCodeStubBuiltin(MacroAssembler* masm) {
1396 Generate_ContinueToBuiltinHelper(masm,
false,
false);
// Continuation into a code-stub builtin that produced a result (second flag).
1399 void Builtins::Generate_ContinueToCodeStubBuiltinWithResult(
1400 MacroAssembler* masm) {
1401 Generate_ContinueToBuiltinHelper(masm,
false,
true);
// Continuation into a JavaScript builtin (arg count is SmiUntagged), no result.
1404 void Builtins::Generate_ContinueToJavaScriptBuiltin(MacroAssembler* masm) {
1405 Generate_ContinueToBuiltinHelper(masm,
true,
false);
// Continuation into a JavaScript builtin that produced a result.
1408 void Builtins::Generate_ContinueToJavaScriptBuiltinWithResult(
1409 MacroAssembler* masm) {
1410 Generate_ContinueToBuiltinHelper(masm,
true,
true);
// Notifies the runtime that deoptimization happened, then returns the saved
// accumulator value (loaded from the stack into eax) to the caller.
1413 void Builtins::Generate_NotifyDeoptimized(MacroAssembler* masm) {
1415 FrameScope scope(masm, StackFrame::INTERNAL);
1416 __ CallRuntime(Runtime::kNotifyDeoptimized);
// The interpreter accumulator must live in eax on ia32.
1420 DCHECK_EQ(kInterpreterAccumulatorRegister.code(), eax.code());
1421 __ mov(eax, Operand(esp, 1 * kPointerSize));
// Pop the one stack slot holding the saved accumulator on return.
1422 __ ret(1 * kPointerSize);
// Implements Function.prototype.apply(thisArg, argArray):
// extracts the receiver (edi), thisArg (via xmm0 staging) and argArray (edx)
// from the stack, rewrites the frame to [receiver=thisArg], then dispatches to
// CallWithArrayLike, or to plain Call when argArray is null/undefined.
// NOTE(review): several argument-shuffling lines are elided in this excerpt.
1426 void Builtins::Generate_FunctionPrototypeApply(MacroAssembler* masm) {
1439 Label no_arg_array, no_this_arg;
// Stash the receiver (function) through xmm0 to free a GP register.
1441 __ movd(xmm0, Operand(esp, eax, times_pointer_size, kPointerSize));
// Defaults when arguments are missing: undefined.
1443 __ LoadRoot(edx, RootIndex::kUndefinedValue);
1446 __ j(zero, &no_this_arg, Label::kNear)
1448 __ mov(edi, Operand(esp, eax, times_pointer_size, 0));
1449 __ cmp(eax, Immediate(1));
1450 __ j(equal, &no_arg_array, Label::kNear);
1451 __ mov(edx, Operand(esp, eax, times_pointer_size, -kPointerSize));
1452 __ bind(&no_arg_array);
1454 __ bind(&no_this_arg);
// Drop all incoming args, leaving only the new receiver on the stack.
1455 __ PopReturnAddressTo(ecx);
1456 __ lea(esp, Operand(esp, eax, times_pointer_size, kPointerSize));
1458 __ PushReturnAddressFrom(ecx);
// apply with null/undefined argArray means "call with no arguments".
1477 __ JumpIfRoot(edx, RootIndex::kNullValue, &no_arguments, Label::kNear);
1478 __ JumpIfRoot(edx, RootIndex::kUndefinedValue, &no_arguments, Label::kNear);
1481 __ Jump(BUILTIN_CODE(masm->isolate(), CallWithArrayLike),
1482 RelocInfo::CODE_TARGET);
1486 __ bind(&no_arguments);
1489 __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
// Implements Function.prototype.call: ensures at least a receiver is present
// (pushing undefined if the stack is empty), loads the target function, then
// shifts every argument down one slot to squeeze out the spare receiver slot
// before tail-calling Call. NOTE(review): loop setup lines are elided here.
1494 void Builtins::Generate_FunctionPrototypeCall(MacroAssembler* masm) {
// No arguments at all: synthesize an undefined receiver under the return
// address.
1509 __ j(not_zero, &done, Label::kNear);
1510 __ PopReturnAddressTo(edx);
1511 __ PushRoot(RootIndex::kUndefinedValue);
1512 __ PushReturnAddressFrom(edx);
// Load the target function (the receiver of the .call invocation).
1518 __ mov(edi, Operand(esp, eax, times_pointer_size, kPointerSize));
// Shift arguments one slot toward higher addresses, overwriting the old
// receiver; loop runs until the index underflows (sign flag).
1527 __ mov(edx, Operand(esp, ecx, times_pointer_size, 0));
1528 __ mov(Operand(esp, ecx, times_pointer_size, kPointerSize), edx);
1530 __ j(not_sign, &loop);
1536 __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
// Implements Reflect.apply(target, thisArgument, argumentsList): picks the
// three optional arguments off the stack (defaulting to undefined), collapses
// the frame to [receiver=thisArgument], and tail-calls CallWithArrayLike.
// NOTE(review): interior lines (defaults for ecx/edx, final stack writes) are
// elided in this excerpt.
1539 void Builtins::Generate_ReflectApply(MacroAssembler* masm) {
1554 __ LoadRoot(edi, RootIndex::kUndefinedValue);
// Load target / thisArgument / argumentsList only if enough args were passed.
1557 __ cmp(eax, Immediate(1));
1558 __ j(below, &done, Label::kNear);
1559 __ mov(edi, Operand(esp, eax, times_pointer_size, -0 * kPointerSize));
1560 __ j(equal, &done, Label::kNear);
1561 __ mov(ecx, Operand(esp, eax, times_pointer_size, -1 * kPointerSize));
1562 __ cmp(eax, Immediate(3));
1563 __ j(below, &done, Label::kNear);
1564 __ mov(edx, Operand(esp, eax, times_pointer_size, -2 * kPointerSize));
// Drop all incoming arguments, keeping the return address.
1570 __ PopReturnAddressTo(edx);
1571 __ lea(esp, Operand(esp, eax, times_pointer_size, kPointerSize));
1573 __ PushReturnAddressFrom(edx);
1591 __ Jump(BUILTIN_CODE(masm->isolate(), CallWithArrayLike),
1592 RelocInfo::CODE_TARGET);
// Implements Reflect.construct(target, argumentsList, newTarget): mirrors
// Generate_ReflectApply but pushes an undefined (hole) receiver and dispatches
// to ConstructWithArrayLike. NOTE(review): interior lines are elided here.
1595 void Builtins::Generate_ReflectConstruct(MacroAssembler* masm) {
1611 __ LoadRoot(edi, RootIndex::kUndefinedValue);
// Conditionally load target / argumentsList / newTarget by argument count.
1614 __ cmp(eax, Immediate(1));
1615 __ j(below, &done, Label::kNear);
1616 __ mov(edi, Operand(esp, eax, times_pointer_size, -0 * kPointerSize));
1618 __ j(equal, &done, Label::kNear);
1619 __ mov(ecx, Operand(esp, eax, times_pointer_size, -1 * kPointerSize));
1620 __ cmp(eax, Immediate(3));
1621 __ j(below, &done, Label::kNear);
1622 __ mov(edx, Operand(esp, eax, times_pointer_size, -2 * kPointerSize));
// Collapse the frame and install an undefined receiver slot for the
// construct call.
1628 __ PopReturnAddressTo(ecx);
1629 __ lea(esp, Operand(esp, eax, times_pointer_size, kPointerSize));
1630 __ PushRoot(RootIndex::kUndefinedValue);
1631 __ PushReturnAddressFrom(ecx);
1654 __ Jump(BUILTIN_CODE(masm->isolate(), ConstructWithArrayLike),
1655 RelocInfo::CODE_TARGET);
// Entry point for the internal Array function: optionally verifies (debug
// builds only) that the function's initial map is a non-Smi Map object, then
// tail-calls the real implementation builtin.
1658 void Builtins::Generate_InternalArrayConstructor(MacroAssembler* masm) {
1664 Label generic_array_code;
1666 if (FLAG_debug_code) {
1668 __ mov(ecx, FieldOperand(edi, JSFunction::kPrototypeOrInitialMapOffset));
// Initial map must not be a Smi (i.e. the function was initialized)...
1670 __ test(ecx, Immediate(kSmiTagMask));
1672 AbortReason::kUnexpectedInitialMapForInternalArrayFunction);
// ...and must actually be a Map.
1673 __ CmpObjectType(ecx, MAP_TYPE, ecx);
1675 AbortReason::kUnexpectedInitialMapForInternalArrayFunction);
1680 __ Jump(BUILTIN_CODE(masm->isolate(), InternalArrayConstructorImpl),
1681 RelocInfo::CODE_TARGET);
// Builds an arguments-adaptor frame: pushes the frame-type marker, stores the
// Smi-tagged argument count (eax) in edi, and pushes a zero padding slot.
// NOTE(review): the standard ebp push/mov lines are elided in this excerpt.
1684 static void EnterArgumentsAdaptorFrame(MacroAssembler* masm) {
1689 __ push(Immediate(StackFrame::TypeToMarker(StackFrame::ARGUMENTS_ADAPTOR)));
// Smi-tag eax into edi: value*2 (+ kSmiTag, which is 0) via lea.
1697 STATIC_ASSERT(kSmiTagSize == 1);
1698 __ lea(edi, Operand(eax, eax, times_1, kSmiTag));
1701 __ Push(Immediate(0));
// Tears down an arguments-adaptor frame: reloads the Smi-tagged argument
// count, then drops the arguments (count*2 bytes decode the Smi: times_2 on a
// Smi equals count * kPointerSize on ia32) plus the receiver slot.
1704 static void LeaveArgumentsAdaptorFrame(MacroAssembler* masm) {
1706 __ mov(edi, Operand(ebp, ArgumentsAdaptorFrameConstants::kLengthOffset));
1712 STATIC_ASSERT(kSmiTagSize == 1 && kSmiTag == 0);
// Preserve the return address across the esp adjustment.
1713 __ PopReturnAddressTo(ecx);
1714 __ lea(esp, Operand(esp, edi, times_2, 1 * kPointerSize));
1715 __ PushReturnAddressFrom(ecx);
// Pushes every element of a FixedArray arguments list onto the stack (mapping
// the-hole elements to undefined), adds the element count to eax, and
// tail-calls the supplied Call/Construct code object. Performs a stack
// overflow check first. NOTE(review): loop bind/increment lines are elided in
// this excerpt.
1719 void Builtins::Generate_CallOrConstructVarargs(MacroAssembler* masm,
1720 Handle<Code> code) {
1737 const Register kArgumentsList = esi;
1738 const Register kArgumentsLength = ecx;
// The arguments list was passed on the stack; pop it out from under the
// return address.
1740 __ PopReturnAddressTo(edx);
1741 __ pop(kArgumentsList);
1742 __ PushReturnAddressFrom(edx);
1744 if (masm->emit_debug_code()) {
// Debug check: the list must be a FixedArray, or a FixedDoubleArray only
// when empty (length 0).
1748 __ AssertNotSmi(kArgumentsList);
1749 __ mov(edx, FieldOperand(kArgumentsList, HeapObject::kMapOffset));
1750 __ CmpInstanceType(edx, FIXED_ARRAY_TYPE);
1752 __ CmpInstanceType(edx, FIXED_DOUBLE_ARRAY_TYPE);
1753 __ j(not_equal, &fail);
1754 __ cmp(kArgumentsLength, 0);
1758 __ Abort(AbortReason::kOperandIsNotAFixedArray);
1765 Label stack_overflow;
1766 Generate_StackOverflowCheck(masm, kArgumentsLength, edx, &stack_overflow);
// Push elements; eax counts how many have been pushed so far.
1770 __ PopReturnAddressTo(edx);
1771 __ Move(eax, Immediate(0));
1772 Label done, push, loop;
1774 __ cmp(eax, kArgumentsLength);
1775 __ j(equal, &done, Label::kNear);
1777 __ mov(edi, FieldOperand(kArgumentsList, eax, times_pointer_size,
1778 FixedArray::kHeaderSize));
// Holes in the array are passed as undefined.
1779 __ CompareRoot(edi, RootIndex::kTheHoleValue);
1780 __ j(not_equal, &push, Label::kNear);
1781 __ LoadRoot(edi, RootIndex::kUndefinedValue);
1787 __ PushReturnAddressFrom(edx);
// eax (prior argument count) += number of pushed varargs.
1797 __ add(eax, kArgumentsLength);
1800 __ Jump(code, RelocInfo::CODE_TARGET);
1802 __ bind(&stack_overflow);
1804 __ TailCallRuntime(Runtime::kThrowStackOverflow);
// Forwards the caller's (rest) arguments to a new call/construct: locates the
// source frame (plain JS frame or arguments adaptor), computes how many
// arguments to copy, copies them onto the current stack, and tail-calls the
// target code. For kConstruct mode it first throws if new.target is not a
// constructor. NOTE(review): several lines (argument-count math, loop
// decrement, final eax adjustment) are elided in this excerpt.
1808 void Builtins::Generate_CallOrConstructForwardVarargs(MacroAssembler* masm,
1809 CallOrConstructMode mode,
1810 Handle<Code> code) {
1821 Register scratch = esi;
// Construct mode: new.target (edx) must be a constructor (map bit check);
// otherwise throw NotConstructor inside an internal frame.
1824 if (mode == CallOrConstructMode::kConstruct) {
1825 Label new_target_constructor, new_target_not_constructor;
1826 __ JumpIfSmi(edx, &new_target_not_constructor, Label::kNear);
1827 __ mov(scratch, FieldOperand(edx, HeapObject::kMapOffset));
1828 __ test_b(FieldOperand(scratch, Map::kBitFieldOffset),
1829 Immediate(Map::IsConstructorBit::kMask));
1830 __ j(not_zero, &new_target_constructor, Label::kNear);
1831 __ bind(&new_target_not_constructor);
1833 FrameScope scope(masm, StackFrame::MANUAL);
1834 __ EnterFrame(StackFrame::INTERNAL);
1837 __ CallRuntime(Runtime::kThrowNotConstructor);
1839 __ bind(&new_target_constructor);
// Determine the source frame: an arguments-adaptor frame (marker match) or
// the ordinary caller frame.
1845 Label arguments_adaptor, arguments_done;
1846 __ mov(scratch, Operand(ebp, StandardFrameConstants::kCallerFPOffset));
1847 __ cmp(Operand(scratch, CommonFrameConstants::kContextOrFrameTypeOffset),
1848 Immediate(StackFrame::TypeToMarker(StackFrame::ARGUMENTS_ADAPTOR)));
1849 __ j(equal, &arguments_adaptor, Label::kNear);
// No adaptor: use the formal parameter count from the SharedFunctionInfo.
1851 __ mov(edx, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
1852 __ mov(edx, FieldOperand(edx, JSFunction::kSharedFunctionInfoOffset));
1853 __ movzx_w(edx, FieldOperand(
1854 edx, SharedFunctionInfo::kFormalParameterCountOffset));
1855 __ mov(scratch, ebp);
1857 __ jmp(&arguments_done, Label::kNear);
1858 __ bind(&arguments_adaptor);
1862 Operand(scratch, ArgumentsAdaptorFrameConstants::kLengthOffset));
1865 __ bind(&arguments_done);
// Copy edx arguments from the source frame, guarded by an overflow check.
1867 Label stack_done, stack_overflow;
1869 __ j(less_equal, &stack_done);
1871 Generate_StackOverflowCheck(masm, edx, ecx, &stack_overflow);
1877 __ PopReturnAddressTo(ecx);
1880 __ Push(Operand(scratch, edx, times_pointer_size, 1 * kPointerSize));
1882 __ j(not_zero, &loop);
1884 __ PushReturnAddressFrom(ecx);
1887 __ bind(&stack_done);
1893 __ Jump(code, RelocInfo::CODE_TARGET);
1895 __ bind(&stack_overflow);
1897 __ TailCallRuntime(Runtime::kThrowStackOverflow);
// Calls a JSFunction in edi with eax arguments: rejects class constructors
// (throws), converts the receiver per the ConvertReceiverMode and the
// function's strict/native bits, then invokes via InvokeFunctionCode with the
// expected argument count from the SharedFunctionInfo.
1901 void Builtins::Generate_CallFunction(MacroAssembler* masm,
1902 ConvertReceiverMode mode) {
1907 __ AssertFunction(edi);
// Class constructors must not be called without `new`.
1911 Label class_constructor;
1912 __ mov(edx, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
1913 __ test(FieldOperand(edx, SharedFunctionInfo::kFlagsOffset),
1914 Immediate(SharedFunctionInfo::IsClassConstructorBit::kMask));
1915 __ j(not_zero, &class_constructor);
// Enter the function's context; strict-mode and native functions skip
// receiver conversion entirely.
1920 __ mov(esi, FieldOperand(edi, JSFunction::kContextOffset));
1923 __ test(FieldOperand(edx, SharedFunctionInfo::kFlagsOffset),
1924 Immediate(SharedFunctionInfo::IsNativeBit::kMask |
1925 SharedFunctionInfo::IsStrictBit::kMask));
1926 __ j(not_zero, &done_convert);
// Sloppy mode: null/undefined receivers become the global proxy; other
// primitives go through ToObject.
1935 if (mode == ConvertReceiverMode::kNullOrUndefined) {
1937 __ LoadGlobalProxy(ecx);
1939 Label convert_to_object, convert_receiver;
1940 __ mov(ecx, Operand(esp, eax, times_pointer_size, kPointerSize));
1941 __ JumpIfSmi(ecx, &convert_to_object, Label::kNear);
1942 STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
1943 __ CmpObjectType(ecx, FIRST_JS_RECEIVER_TYPE, ecx);
1944 __ j(above_equal, &done_convert);
1946 __ mov(ecx, Operand(esp, eax, times_pointer_size, kPointerSize));
1947 if (mode != ConvertReceiverMode::kNotNullOrUndefined) {
1948 Label convert_global_proxy;
1949 __ JumpIfRoot(ecx, RootIndex::kUndefinedValue, &convert_global_proxy,
1951 __ JumpIfNotRoot(ecx, RootIndex::kNullValue, &convert_to_object,
1953 __ bind(&convert_global_proxy);
1956 __ LoadGlobalProxy(ecx);
1958 __ jmp(&convert_receiver);
1960 __ bind(&convert_to_object);
// ToObject may allocate/throw, so run it inside an internal frame (the
// elided lines save/restore eax and edi around the call).
1965 FrameScope scope(masm, StackFrame::INTERNAL);
1971 __ Call(BUILTIN_CODE(masm->isolate(), ToObject),
1972 RelocInfo::CODE_TARGET);
// Reload the SharedFunctionInfo — edx was clobbered across the call.
1979 __ mov(edx, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
1980 __ bind(&convert_receiver);
// Write the converted receiver back into its stack slot.
1982 __ mov(Operand(esp, eax, times_pointer_size, kPointerSize), ecx);
1984 __ bind(&done_convert);
1994 ecx, FieldOperand(edx, SharedFunctionInfo::kFormalParameterCountOffset));
1995 ParameterCount actual(eax);
1996 ParameterCount expected(ecx);
1997 __ InvokeFunctionCode(edi, no_reg, expected, actual, JUMP_FUNCTION);
// Error path: calling a class constructor throws a TypeError.
1999 __ bind(&class_constructor);
2001 FrameScope frame(masm, StackFrame::INTERNAL);
2003 __ CallRuntime(Runtime::kThrowConstructorNonCallableError);
// Inserts a bound function's [[BoundArguments]] between the receiver and the
// existing stack arguments: grows the stack, shifts the current arguments
// down, then copies the bound arguments from the FixedArray (reversed),
// updating eax. Includes a stack-limit check that throws on overflow.
// NOTE(review): loop bind/decrement lines are elided in this excerpt; xmm1 is
// used as a scratch to move values without spilling a GP register.
2009 void Generate_PushBoundArguments(MacroAssembler* masm) {
2019 Label no_bound_arguments;
2020 __ mov(ecx, FieldOperand(edi, JSBoundFunction::kBoundArgumentsOffset));
2021 __ mov(edx, FieldOperand(ecx, FixedArray::kLengthOffset));
2024 __ j(zero, &no_bound_arguments);
// Check the grown stack against the real stack limit before committing.
2037 __ lea(ecx, Operand(edx, times_pointer_size, 0));
2042 __ CompareRealStackLimit(esp);
2043 __ j(above_equal, &done, Label::kNear);
2045 __ lea(esp, Operand(esp, edx, times_pointer_size, 0));
2047 FrameScope scope(masm, StackFrame::MANUAL);
2048 __ EnterFrame(StackFrame::INTERNAL);
2049 __ CallRuntime(Runtime::kThrowStackOverflow);
// Relocate the existing arguments to the new (lower) stack region.
2061 __ lea(edx, Operand(esp, edx, times_pointer_size, 0));
2063 __ movd(xmm1, Operand(edx, ecx, times_pointer_size, 0));
2064 __ movd(Operand(esp, ecx, times_pointer_size, 0), xmm1);
// Copy [[BoundArguments]] into the freed slots.
2073 __ mov(ecx, FieldOperand(edi, JSBoundFunction::kBoundArgumentsOffset));
2074 __ mov(edx, FieldOperand(ecx, FixedArray::kLengthOffset));
2078 __ movd(xmm1, FieldOperand(ecx, edx, times_pointer_size,
2079 FixedArray::kHeaderSize));
2080 __ movd(Operand(esp, eax, times_pointer_size, 0), xmm1);
2081 __ lea(eax, Operand(eax, 1));
2082 __ j(greater, &loop);
2091 __ bind(&no_bound_arguments);
// Calls a JSBoundFunction: installs [[BoundThis]] as the receiver, pushes the
// [[BoundArguments]], then tail-calls Call on the [[BoundTargetFunction]].
2098 void Builtins::Generate_CallBoundFunctionImpl(MacroAssembler* masm) {
2103 __ AssertBoundFunction(edi);
// Patch the receiver slot with the bound this value.
2106 __ mov(ecx, FieldOperand(edi, JSBoundFunction::kBoundThisOffset));
2107 __ mov(Operand(esp, eax, times_pointer_size, kPointerSize), ecx);
2110 Generate_PushBoundArguments(masm);
2113 __ mov(edi, FieldOperand(edi, JSBoundFunction::kBoundTargetFunctionOffset));
2114 __ Jump(BUILTIN_CODE(masm->isolate(), Call_ReceiverIsAny),
2115 RelocInfo::CODE_TARGET);
// Generic Call dispatcher on the target in edi:
//   JSFunction       -> CallFunction(mode)
//   JSBoundFunction  -> CallBoundFunction
//   JSProxy          -> CallProxy
//   other callables  -> CALL_AS_FUNCTION_DELEGATE (target becomes receiver)
//   non-callables    -> throw CalledNonCallable.
2119 void Builtins::Generate_Call(MacroAssembler* masm, ConvertReceiverMode mode) {
2125 Label non_callable, non_function, non_smi, non_jsfunction,
2126 non_jsboundfunction;
2127 __ JumpIfSmi(edi, &non_callable);
2129 __ CmpObjectType(edi, JS_FUNCTION_TYPE, ecx);
2130 __ j(not_equal, &non_jsfunction);
2131 __ Jump(masm->isolate()->builtins()->CallFunction(mode),
2132 RelocInfo::CODE_TARGET);
2134 __ bind(&non_jsfunction);
2135 __ CmpInstanceType(ecx, JS_BOUND_FUNCTION_TYPE);
2136 __ j(not_equal, &non_jsboundfunction);
2137 __ Jump(BUILTIN_CODE(masm->isolate(), CallBoundFunction),
2138 RelocInfo::CODE_TARGET);
// Anything else must at least have the callable bit set on its map.
2141 __ bind(&non_jsboundfunction);
2142 __ test_b(FieldOperand(ecx, Map::kBitFieldOffset),
2143 Immediate(Map::IsCallableBit::kMask));
2144 __ j(zero, &non_callable);
2147 __ CmpInstanceType(ecx, JS_PROXY_TYPE);
2148 __ j(not_equal, &non_function);
2149 __ Jump(BUILTIN_CODE(masm->isolate(), CallProxy), RelocInfo::CODE_TARGET);
// Exotic callable object: store it as the receiver and invoke the
// call-as-function delegate instead.
2153 __ bind(&non_function);
2155 __ mov(Operand(esp, eax, times_pointer_size, kPointerSize), edi);
2157 __ LoadGlobalFunction(Context::CALL_AS_FUNCTION_DELEGATE_INDEX, edi);
2158 __ Jump(masm->isolate()->builtins()->CallFunction(
2159 ConvertReceiverMode::kNotNullOrUndefined),
2160 RelocInfo::CODE_TARGET);
2163 __ bind(&non_callable);
2165 FrameScope scope(masm, StackFrame::INTERNAL);
2167 __ CallRuntime(Runtime::kThrowCalledNonCallable);
// Constructs an instance of a JSFunction (edi): chooses between the JS
// builtins construct stub and the generic construct stub based on the
// ConstructAsBuiltinBit in the SharedFunctionInfo flags. ecx is cleared to
// undefined as it is (presumably) the allocation-site argument — TODO confirm
// against the stub's calling convention.
2172 void Builtins::Generate_ConstructFunction(MacroAssembler* masm) {
2178 __ AssertConstructor(edi);
2179 __ AssertFunction(edi);
2181 Label call_generic_stub;
2184 __ mov(ecx, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
2185 __ test(FieldOperand(ecx, SharedFunctionInfo::kFlagsOffset),
2186 Immediate(SharedFunctionInfo::ConstructAsBuiltinBit::kMask));
2187 __ j(zero, &call_generic_stub, Label::kNear);
2191 __ LoadRoot(ecx, RootIndex::kUndefinedValue);
2192 __ Jump(BUILTIN_CODE(masm->isolate(), JSBuiltinsConstructStub),
2193 RelocInfo::CODE_TARGET);
2195 __ bind(&call_generic_stub);
2198 __ LoadRoot(ecx, RootIndex::kUndefinedValue);
2199 __ Jump(BUILTIN_CODE(masm->isolate(), JSConstructStubGeneric),
2200 RelocInfo::CODE_TARGET);
// Constructs via a JSBoundFunction: pushes [[BoundArguments]], and when
// new.target equals the bound function itself (the elided cmp before the
// conditional jump) replaces it with the [[BoundTargetFunction]], then
// tail-calls the generic Construct builtin on the target.
2204 void Builtins::Generate_ConstructBoundFunction(MacroAssembler* masm) {
2210 __ AssertConstructor(edi);
2211 __ AssertBoundFunction(edi);
2214 Generate_PushBoundArguments(masm);
2220 __ j(not_equal, &done, Label::kNear);
2221 __ mov(edx, FieldOperand(edi, JSBoundFunction::kBoundTargetFunctionOffset));
2226 __ mov(edi, FieldOperand(edi, JSBoundFunction::kBoundTargetFunctionOffset));
2227 __ Jump(BUILTIN_CODE(masm->isolate(), Construct), RelocInfo::CODE_TARGET);
// Generic Construct dispatcher on the target in edi:
//   JSFunction       -> ConstructFunction
//   JSBoundFunction  -> ConstructBoundFunction
//   JSProxy          -> ConstructProxy
//   other ctors      -> CALL_AS_CONSTRUCTOR_DELEGATE (target becomes receiver)
//   non-constructors -> ConstructedNonConstructable.
2231 void Builtins::Generate_Construct(MacroAssembler* masm) {
2240 Label non_constructor, non_proxy, non_jsfunction, non_jsboundfunction;
2241 __ JumpIfSmi(edi, &non_constructor);
// Every constructable target has the constructor bit set on its map.
2244 __ mov(ecx, FieldOperand(edi, HeapObject::kMapOffset));
2245 __ test_b(FieldOperand(ecx, Map::kBitFieldOffset),
2246 Immediate(Map::IsConstructorBit::kMask));
2247 __ j(zero, &non_constructor);
2250 __ CmpInstanceType(ecx, JS_FUNCTION_TYPE);
2251 __ j(not_equal, &non_jsfunction);
2252 __ Jump(BUILTIN_CODE(masm->isolate(), ConstructFunction),
2253 RelocInfo::CODE_TARGET);
2257 __ bind(&non_jsfunction);
2258 __ CmpInstanceType(ecx, JS_BOUND_FUNCTION_TYPE);
2259 __ j(not_equal, &non_jsboundfunction);
2260 __ Jump(BUILTIN_CODE(masm->isolate(), ConstructBoundFunction),
2261 RelocInfo::CODE_TARGET);
2264 __ bind(&non_jsboundfunction);
2265 __ CmpInstanceType(ecx, JS_PROXY_TYPE);
2266 __ j(not_equal, &non_proxy);
2267 __ Jump(BUILTIN_CODE(masm->isolate(), ConstructProxy),
2268 RelocInfo::CODE_TARGET);
// Exotic constructable object: store it as the receiver and call via the
// call-as-constructor delegate.
2271 __ bind(&non_proxy);
2274 __ mov(Operand(esp, eax, times_pointer_size, kPointerSize), edi);
2276 __ LoadGlobalFunction(Context::CALL_AS_CONSTRUCTOR_DELEGATE_INDEX, edi);
2277 __ Jump(masm->isolate()->builtins()->CallFunction(),
2278 RelocInfo::CODE_TARGET);
2283 __ bind(&non_constructor);
2284 __ Jump(BUILTIN_CODE(masm->isolate(), ConstructedNonConstructable),
2285 RelocInfo::CODE_TARGET);
// Adapts the actual argument count (eax) to the expected count (ecx) before
// invoking a JSFunction: the "enough" path copies the expected number of
// arguments; the "too few" path copies all actual arguments and pads with
// undefined; the sentinel count skips adaptation entirely. A stack overflow
// check protects both copy paths. NOTE(review): several lines (label binds,
// the &invoke call sequence, loop decrements) are elided in this excerpt, and
// L923's "©);" appears to be mojibake for "&copy);" — the copy-loop label.
2288 void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
2296 const Register kExpectedNumberOfArgumentsRegister = ecx;
2298 Label invoke, dont_adapt_arguments, stack_overflow, enough, too_few;
2299 __ cmp(kExpectedNumberOfArgumentsRegister,
2300 SharedFunctionInfo::kDontAdaptArgumentsSentinel);
2301 __ j(equal, &dont_adapt_arguments);
2302 __ cmp(eax, kExpectedNumberOfArgumentsRegister);
2303 __ j(less, &too_few);
// Enough-arguments path: copy `expected` args from the caller frame.
2307 EnterArgumentsAdaptorFrame(masm);
2310 Generate_StackOverflowCheck(masm, kExpectedNumberOfArgumentsRegister, edi,
2314 const int offset = StandardFrameConstants::kCallerSPOffset;
2315 __ lea(edi, Operand(ebp, eax, times_4, offset));
2321 __ push(Operand(edi, 0));
2322 __ sub(edi, Immediate(kPointerSize));
2323 __ cmp(eax, kExpectedNumberOfArgumentsRegister);
// Too-few path: copy all actual args, then pad with undefined up to the
// expected count. ecx is stashed in xmm0 while used for loop counting.
2331 EnterArgumentsAdaptorFrame(masm);
2334 Generate_StackOverflowCheck(masm, kExpectedNumberOfArgumentsRegister, edi,
2338 __ movd(xmm0, kExpectedNumberOfArgumentsRegister);
2341 const int offset = StandardFrameConstants::kCallerSPOffset;
2342 __ lea(edi, Operand(ebp, eax, times_4, offset));
2344 __ sub(kExpectedNumberOfArgumentsRegister, eax);
2347 __ sub(eax, Immediate(1));
2352 __ push(Operand(edi, 0));
2353 __ sub(edi, Immediate(kPointerSize));
2355 __ j(not_zero, ©);
2361 __ Push(Immediate(masm->isolate()->factory()->undefined_value()));
2362 __ cmp(eax, kExpectedNumberOfArgumentsRegister);
// Invoke: call the function's code start (must be in ecx per the JS ABI).
2372 __ mov(edi, Operand(ebp, ArgumentsAdaptorFrameConstants::kFunctionOffset));
2376 static_assert(kJavaScriptCallCodeStartRegister == ecx,
"ABI mismatch");
2377 __ mov(ecx, FieldOperand(edi, JSFunction::kCodeOffset));
2378 __ add(ecx, Immediate(Code::kHeaderSize - kHeapObjectTag));
// Record the return point for the deoptimizer.
2382 masm->isolate()->heap()->SetArgumentsAdaptorDeoptPCOffset(masm->pc_offset());
2385 LeaveArgumentsAdaptorFrame(masm);
// Sentinel path: jump straight into the function's code.
2391 __ bind(&dont_adapt_arguments);
2392 static_assert(kJavaScriptCallCodeStartRegister == ecx,
"ABI mismatch");
2393 __ mov(ecx, FieldOperand(edi, JSFunction::kCodeOffset));
2394 __ add(ecx, Immediate(Code::kHeaderSize - kHeapObjectTag));
2397 __ bind(&stack_overflow);
2399 FrameScope frame(masm, StackFrame::MANUAL);
2400 __ CallRuntime(Runtime::kThrowStackOverflow);
// On-stack replacement from the interpreter: asks the runtime to compile an
// OSR code object for the caller's function, then (if one was produced)
// computes the OSR entry PC from the code's deoptimization data and redirects
// the return address to it. NOTE(review): the &skip label bind and the frame
// teardown lines are elided in this excerpt.
2405 void Builtins::Generate_InterpreterOnStackReplacement(MacroAssembler* masm) {
2407 __ mov(eax, Operand(ebp, StandardFrameConstants::kCallerFPOffset));
2408 __ mov(eax, Operand(eax, JavaScriptFrameConstants::kFunctionOffset));
2411 FrameScope scope(masm, StackFrame::INTERNAL);
2414 __ CallRuntime(Runtime::kCompileForOnStackReplacement);
// Null result means no OSR code is available.
2419 __ cmp(eax, Immediate(0));
2420 __ j(not_equal, &skip, Label::kNear);
// Load the Smi OSR pc offset out of the code's DeoptimizationData.
2430 __ mov(ecx, Operand(eax, Code::kDeoptimizationDataOffset - kHeapObjectTag));
2433 __ mov(ecx, Operand(ecx, FixedArray::OffsetOfElementAt(
2434 DeoptimizationData::kOsrPcOffsetIndex) -
// Entry address = code start + osr offset (past the Code header).
2439 __ lea(eax, Operand(eax, ecx, times_1, Code::kHeaderSize - kHeapObjectTag));
// Overwrite the return address so `ret` resumes in the OSR code.
2442 __ mov(Operand(esp, 0), eax);
// Lazy-compiles a Wasm function: saves all GP and FP (xmm, as Simd128)
// parameter registers, calls Runtime::kWasmCompileLazy with the function
// index and instance, restores the registers, and (per the elided tail)
// jumps to the freshly compiled code returned in edi.
2448 void Builtins::Generate_WasmCompileLazy(MacroAssembler* masm) {
// Smi-tag the function index so the GC treats it as a tagged value.
2451 __ SmiTag(kWasmCompileLazyFuncIndexRegister);
2453 HardAbortScope hard_abort(masm);
2454 FrameScope scope(masm, StackFrame::WASM_COMPILE_LAZY);
// The frame layout must have exactly one slot per saved parameter register.
2459 static_assert(WasmCompileLazyFrameConstants::kNumberOfSavedGpParamRegs ==
2460 arraysize(wasm::kGpParamRegisters),
2461 "frame size mismatch");
2462 for (Register reg : wasm::kGpParamRegisters) {
2465 static_assert(WasmCompileLazyFrameConstants::kNumberOfSavedFpParamRegs ==
2466 arraysize(wasm::kFpParamRegisters),
2467 "frame size mismatch");
2468 __ sub(esp, Immediate(kSimd128Size * arraysize(wasm::kFpParamRegisters)));
2470 for (DoubleRegister reg : wasm::kFpParamRegisters) {
2471 __ movdqu(Operand(esp, offset), reg);
2472 offset += kSimd128Size;
// Runtime arguments: instance, then the Smi function index.
2476 __ Push(kWasmInstanceRegister);
2478 __ Push(kWasmCompileLazyFuncIndexRegister);
// Use the instance's CEntry stub; the context is irrelevant for Wasm.
2480 __ mov(ecx, FieldOperand(kWasmInstanceRegister,
2481 WasmInstanceObject::kCEntryStubOffset));
2484 __ Move(kContextRegister, Smi::zero());
2488 __ CallRuntimeWithCEntry(Runtime::kWasmCompileLazy, ecx);
// The runtime returns the entry address of the compiled code.
2491 __ mov(edi, kReturnRegister0);
// Restore FP then GP parameter registers in reverse save order.
2494 for (DoubleRegister reg : base::Reversed(wasm::kFpParamRegisters)) {
2495 offset -= kSimd128Size;
2496 __ movdqu(reg, Operand(esp, offset));
2498 DCHECK_EQ(0, offset);
2499 __ add(esp, Immediate(kSimd128Size * arraysize(wasm::kFpParamRegisters)));
2500 for (Register reg : base::Reversed(wasm::kGpParamRegisters)) {
// The C entry stub: transitions from generated code into a C++ runtime
// function (edx), passing argc (eax), argv (ecx) and the isolate, then either
// returns the result or — when the result is the exception sentinel — finds
// the pending handler via Runtime::kUnwindAndFindExceptionHandler and resumes
// there. NOTE(review): a number of lines (exit-frame entry for the
// kArgvOnStack case, the okay label, the final handler jump) are elided in
// this excerpt.
2508 void Builtins::Generate_CEntry(MacroAssembler* masm,
int result_size,
2509 SaveFPRegsMode save_doubles, ArgvMode argv_mode,
2510 bool builtin_exit_frame) {
// Register conventions this stub depends on.
2521 STATIC_ASSERT(eax == kRuntimeCallArgCountRegister);
2522 STATIC_ASSERT(ecx == kRuntimeCallArgvRegister);
2523 STATIC_ASSERT(edx == kRuntimeCallFunctionRegister);
2524 STATIC_ASSERT(esi == kContextRegister);
2525 STATIC_ASSERT(edi == kJSFunctionRegister);
2527 DCHECK(!AreAliased(kRuntimeCallArgCountRegister, kRuntimeCallArgvRegister,
2528 kRuntimeCallFunctionRegister, kContextRegister,
2529 kJSFunctionRegister, kRootRegister));
// Three outgoing C slots: argc, argv, isolate.
2535 int arg_stack_space = 3;
2538 if (argv_mode == kArgvInRegister) {
2539 DCHECK(save_doubles == kDontSaveFPRegs);
2540 DCHECK(!builtin_exit_frame);
2541 __ EnterApiExitFrame(arg_stack_space, edi);
2548 arg_stack_space, save_doubles == kSaveFPRegs,
2549 builtin_exit_frame ? StackFrame::BUILTIN_EXIT : StackFrame::EXIT);
2561 if (FLAG_debug_code) {
2562 __ CheckStackAlignment();
// Store the C arguments and call the runtime function.
2565 __ mov(Operand(esp, 0 * kPointerSize), edi);
2566 __ mov(Operand(esp, 1 * kPointerSize), esi);
2567 __ Move(ecx, Immediate(ExternalReference::isolate_address(masm->isolate())));
2568 __ mov(Operand(esp, 2 * kPointerSize), ecx);
2569 __ call(kRuntimeCallFunctionRegister);
// An exception is signalled by returning the Exception sentinel.
2574 Label exception_returned;
2575 __ CompareRoot(eax, RootIndex::kException);
2576 __ j(equal, &exception_returned);
// Debug check: no exception may be pending when none was signalled.
2580 if (FLAG_debug_code) {
2582 __ LoadRoot(edx, RootIndex::kTheHoleValue);
2584 ExternalReference pending_exception_address = ExternalReference::Create(
2585 IsolateAddressId::kPendingExceptionAddress, masm->isolate());
2586 __ cmp(edx, __ ExternalReferenceAsOperand(pending_exception_address, ecx));
2588 __ j(equal, &okay, Label::kNear);
2595 __ LeaveExitFrame(save_doubles == kSaveFPRegs, argv_mode == kArgvOnStack);
// Exception path: locate the pending handler and restore its sp/fp/context.
2599 __ bind(&exception_returned);
2601 ExternalReference pending_handler_context_address = ExternalReference::Create(
2602 IsolateAddressId::kPendingHandlerContextAddress, masm->isolate());
2603 ExternalReference pending_handler_entrypoint_address =
2604 ExternalReference::Create(
2605 IsolateAddressId::kPendingHandlerEntrypointAddress, masm->isolate());
2606 ExternalReference pending_handler_fp_address = ExternalReference::Create(
2607 IsolateAddressId::kPendingHandlerFPAddress, masm->isolate());
2608 ExternalReference pending_handler_sp_address = ExternalReference::Create(
2609 IsolateAddressId::kPendingHandlerSPAddress, masm->isolate());
2613 ExternalReference find_handler =
2614 ExternalReference::Create(Runtime::kUnwindAndFindExceptionHandler);
2616 FrameScope scope(masm, StackFrame::MANUAL);
2617 __ PrepareCallCFunction(3, eax);
// UnwindAndFindExceptionHandler(nullptr, nullptr, isolate).
2618 __ mov(Operand(esp, 0 * kPointerSize), Immediate(0));
2619 __ mov(Operand(esp, 1 * kPointerSize), Immediate(0));
2621 Immediate(ExternalReference::isolate_address(masm->isolate())));
2622 __ mov(Operand(esp, 2 * kPointerSize), esi);
2623 __ CallCFunction(find_handler, 3);
2627 __ mov(esp, __ ExternalReferenceAsOperand(pending_handler_sp_address, esi));
2628 __ mov(ebp, __ ExternalReferenceAsOperand(pending_handler_fp_address, esi));
2630 __ ExternalReferenceAsOperand(pending_handler_context_address, esi));
// Only JS frames store the context slot; skip for stub frames (zero ctx).
2636 __ j(zero, &skip, Label::kNear);
2637 __ mov(Operand(ebp, StandardFrameConstants::kContextOffset), esi);
2641 __ mov(edi, __ ExternalReferenceAsOperand(pending_handler_entrypoint_address,
// Truncates a double (on the stack at kArgumentOffset) to a 32-bit integer
// following ECMAScript ToInt32 semantics, writing the result back over the
// mantissa slot. Fast path for exponents below kMantissaBits; SSE3 fisttp
// used when available for the 64-bit case, otherwise manual shifting of the
// significand. NOTE(review): register save/restore lines and some operand
// lines are elided in this excerpt.
2646 void Builtins::Generate_DoubleToI(MacroAssembler* masm) {
2647 Label check_negative, process_64_bits, done;
// Argument lives above return address + saved registers.
2650 const int kArgumentOffset = 4 * kPointerSize;
2652 MemOperand mantissa_operand(MemOperand(esp, kArgumentOffset));
2653 MemOperand exponent_operand(
2654 MemOperand(esp, kArgumentOffset + kDoubleSize / 2));
2657 MemOperand return_operand = mantissa_operand;
2659 Register scratch1 = ebx;
// result_reg and save_reg alias eax — the saved value is restored from
// elsewhere (elided).
2663 Register result_reg = eax;
2667 Register save_reg = eax;
2672 __ mov(scratch1, mantissa_operand);
2673 if (CpuFeatures::IsSupported(SSE3)) {
2674 CpuFeatureScope scope(masm, SSE3);
// Keep the full double on the x87 stack for fisttp later.
2676 __ fld_d(mantissa_operand);
// Extract the biased exponent and un-bias it.
2678 __ mov(ecx, exponent_operand);
2680 __ and_(ecx, HeapNumber::kExponentMask);
2681 __ shr(ecx, HeapNumber::kExponentShift);
2682 __ lea(result_reg, MemOperand(ecx, -HeapNumber::kExponentBias));
2683 __ cmp(result_reg, Immediate(HeapNumber::kMantissaBits));
2684 __ j(below, &process_64_bits);
// Large-exponent path: result comes from shifting the low mantissa word.
2687 int delta = HeapNumber::kExponentBias + Double::kPhysicalSignificandSize;
2688 if (CpuFeatures::IsSupported(SSE3)) {
2691 __ sub(ecx, Immediate(delta));
2692 __ xor_(result_reg, result_reg);
2693 __ cmp(ecx, Immediate(31));
2695 __ shl_cl(scratch1);
2696 __ jmp(&check_negative);
2698 __ bind(&process_64_bits);
2699 if (CpuFeatures::IsSupported(SSE3)) {
2700 CpuFeatureScope scope(masm, SSE3);
// fisttp truncates regardless of rounding mode and pops the x87 stack.
2702 __ sub(esp, Immediate(kDoubleSize));
2704 __ fisttp_d(Operand(esp, 0));
2705 __ mov(result_reg, Operand(esp, 0));
2706 __ add(esp, Immediate(kDoubleSize));
// Non-SSE3: reconstruct the significand (mask, add hidden bit) and shift
// the 64-bit value right by the exponent-derived count.
2710 __ sub(ecx, Immediate(delta));
2712 __ mov(result_reg, exponent_operand);
2714 Immediate(static_cast<uint32_t>(Double::kSignificandMask >> 32)));
2716 Immediate(static_cast<uint32_t>(Double::kHiddenBit >> 32)));
2717 __ shrd_cl(scratch1, result_reg);
2718 __ shr_cl(result_reg);
// shrd/shr only use cl mod 32; bit 5 decides which register holds the low
// word.
2719 __ test(ecx, Immediate(32));
2720 __ cmov(not_equal, scratch1, result_reg);
// Negate the result when the input's sign bit (exponent word) was set.
2724 __ bind(&check_negative);
2725 __ mov(result_reg, scratch1);
2727 __ cmp(exponent_operand, Immediate(0));
2728 __ cmov(greater, result_reg, scratch1);
2732 __ mov(return_operand, result_reg);
// Computes base^exponent (doubles in xmm2/xmm1, result in xmm3). Integer
// exponents use square-and-multiply; non-integer exponents try an x87 fast
// path (f2xm1/fscale sequence, elided) and fall back to the C library
// power_double_double_function on failure. NOTE(review): many lines (the x87
// fast-power arithmetic, final register moves, ret) are elided in this
// excerpt.
2739 void Builtins::Generate_MathPowInternal(MacroAssembler* masm) {
2740 const Register exponent = eax;
2741 const Register scratch = ecx;
2742 const XMMRegister double_result = xmm3;
2743 const XMMRegister double_base = xmm2;
2744 const XMMRegister double_exponent = xmm1;
2745 const XMMRegister double_scratch = xmm4;
2747 Label call_runtime, done, exponent_not_smi, int_exponent;
// result = 1.0 initially.
2750 __ mov(scratch, Immediate(1));
2751 __ Cvtsi2sd(double_result, scratch);
// Try to treat the exponent as an exact int32.
2753 Label fast_power, try_arithmetic_simplification;
2754 __ DoubleToI(exponent, double_exponent, double_scratch,
2755 &try_arithmetic_simplification, &try_arithmetic_simplification);
2756 __ jmp(&int_exponent);
2758 __ bind(&try_arithmetic_simplification);
// cvttsd2si returns 0x80000000 (sets OF via the cmp trick) for
// unrepresentable values — then go to the runtime.
2760 __ cvttsd2si(exponent, Operand(double_exponent));
2761 __ cmp(exponent, Immediate(0x1));
2762 __ j(overflow, &call_runtime);
// x87 fast path for non-integer exponents.
2765 Label fast_power_failed;
2766 __ bind(&fast_power);
2769 __ sub(esp, Immediate(kDoubleSize));
2770 __ movsd(Operand(esp, 0), double_exponent);
2771 __ fld_d(Operand(esp, 0));
2772 __ movsd(Operand(esp, 0), double_base);
2773 __ fld_d(Operand(esp, 0));
// Check x87 status flags for precision loss / invalid operation.
2792 __ test_b(eax, Immediate(0x5F));
2793 __ j(not_zero, &fast_power_failed, Label::kNear);
2794 __ fstp_d(Operand(esp, 0));
2795 __ movsd(double_result, Operand(esp, 0));
2796 __ add(esp, Immediate(kDoubleSize));
2799 __ bind(&fast_power_failed);
2801 __ add(esp, Immediate(kDoubleSize));
2802 __ jmp(&call_runtime);
// Integer-exponent path: square-and-multiply on |exponent|.
2805 __ bind(&int_exponent);
2806 const XMMRegister double_scratch2 = double_exponent;
2807 __ mov(scratch, exponent);
2808 __ movsd(double_scratch, double_base);
2809 __ movsd(double_scratch2, double_result);
2812 Label no_neg, while_true, while_false;
2813 __ test(scratch, scratch);
2814 __ j(positive, &no_neg, Label::kNear);
2818 __ j(zero, &while_false, Label::kNear);
2822 __ j(above, &while_true, Label::kNear);
2823 __ movsd(double_result, double_scratch);
2824 __ j(zero, &while_false, Label::kNear);
2826 __ bind(&while_true);
2828 __ mulsd(double_scratch, double_scratch);
2829 __ j(above, &while_true, Label::kNear);
2830 __ mulsd(double_result, double_scratch);
2831 __ j(not_zero, &while_true);
2833 __ bind(&while_false);
// Negative exponent: result = 1 / base^|exponent|.
2836 __ test(exponent, exponent);
2837 __ j(positive, &done);
2838 __ divsd(double_scratch2, double_result);
2839 __ movsd(double_result, double_scratch2);
// 0 or -0 result needs the runtime to get the sign/infinity cases right —
// TODO confirm against the elided branch around these lines.
2842 __ xorps(double_scratch2, double_scratch2);
2843 __ ucomisd(double_scratch2, double_result);
2847 __ j(not_equal, &done);
2848 __ Cvtsi2sd(double_exponent, exponent);
// Runtime fallback: call power_double_double(base, exponent) via the C ABI.
2851 __ bind(&call_runtime);
2853 AllowExternalCallThatCantCauseGC scope(masm);
2854 __ PrepareCallCFunction(4, scratch);
2855 __ movsd(Operand(esp, 0 * kDoubleSize), double_base);
2856 __ movsd(Operand(esp, 1 * kDoubleSize), double_exponent);
2857 __ CallCFunction(ExternalReference::power_double_double_function(), 4);
// The C result comes back on the x87 stack; round-trip it through memory.
2861 __ sub(esp, Immediate(kDoubleSize));
2862 __ fstp_d(Operand(esp, 0));
2863 __ movsd(double_result, Operand(esp, 0));
2864 __ add(esp, Immediate(kDoubleSize));
// Dispatches an internal-Array construction by argument count: 0 args ->
// no-argument stub; 1 arg -> single-argument stub (with a packed->holey
// upgrade when the length argument is non-zero); otherwise the N-arguments
// constructor. NOTE(review): the "¬_…" tokens on L1162/L1165/L1166/L1173/
// L1176 appear to be mojibake for "&not_…" label references.
2872 void GenerateInternalArrayConstructorCase(MacroAssembler* masm,
2873 ElementsKind kind) {
2874 Label not_zero_case, not_one_case;
2875 Label normal_sequence;
2878 __ j(not_zero, ¬_zero_case);
2879 __ Jump(CodeFactory::InternalArrayNoArgumentConstructor(masm->isolate(), kind)
2881 RelocInfo::CODE_TARGET);
2883 __ bind(¬_zero_case);
2885 __ j(greater, ¬_one_case);
2887 if (IsFastPackedElementsKind(kind)) {
// A non-zero length on a packed kind must go to the holey variant, since
// the elements start out as holes.
2890 __ mov(ecx, Operand(esp, kPointerSize));
2892 __ j(zero, &normal_sequence);
2894 __ Jump(CodeFactory::InternalArraySingleArgumentConstructor(
2895 masm->isolate(), GetHoleyElementsKind(kind))
2897 RelocInfo::CODE_TARGET);
2900 __ bind(&normal_sequence);
2902 CodeFactory::InternalArraySingleArgumentConstructor(masm->isolate(), kind)
2904 RelocInfo::CODE_TARGET);
2906 __ bind(¬_one_case);
// N-arguments path expects the allocation-site slot to hold undefined.
2909 __ LoadRoot(kJavaScriptCallExtraArg1Register, RootIndex::kUndefinedValue);
2910 Handle<Code> code = BUILTIN_CODE(masm->isolate(), ArrayNArgumentsConstructor);
2911 __ Jump(code, RelocInfo::CODE_TARGET);
// Implementation of the internal Array constructor: reads the elements kind
// from the function's initial map (debug-checked to be PACKED_ELEMENTS or
// HOLEY_ELEMENTS) and dispatches to the matching constructor-case generator.
2916 void Builtins::Generate_InternalArrayConstructorImpl(MacroAssembler* masm) {
2924 if (FLAG_debug_code) {
// Sanity-check the initial map: non-Smi and an actual Map.
2929 __ mov(ecx, FieldOperand(edi, JSFunction::kPrototypeOrInitialMapOffset));
2931 __ test(ecx, Immediate(kSmiTagMask));
2932 __ Assert(not_zero, AbortReason::kUnexpectedInitialMapForArrayFunction);
2933 __ CmpObjectType(ecx, MAP_TYPE, ecx);
2934 __ Assert(equal, AbortReason::kUnexpectedInitialMapForArrayFunction);
// Decode the elements kind out of the map's bit field 2.
2938 __ mov(ecx, FieldOperand(edi, JSFunction::kPrototypeOrInitialMapOffset));
2942 __ mov(ecx, FieldOperand(ecx, Map::kBitField2Offset));
2944 __ DecodeField<Map::ElementsKindBits>(ecx);
2946 if (FLAG_debug_code) {
2948 __ cmp(ecx, Immediate(PACKED_ELEMENTS));
2950 __ cmp(ecx, Immediate(HOLEY_ELEMENTS));
2953 AbortReason::kInvalidElementsKindForInternalArrayOrInternalPackedArray);
2957 Label fast_elements_case;
2958 __ cmp(ecx, Immediate(PACKED_ELEMENTS));
2959 __ j(equal, &fast_elements_case);
2960 GenerateInternalArrayConstructorCase(masm, HOLEY_ELEMENTS);
2962 __ bind(&fast_elements_case);
2963 GenerateInternalArrayConstructorCase(masm, PACKED_ELEMENTS);
2971 #endif // V8_TARGET_ARCH_IA32