#include "src/base/bits.h"
#include "src/base/division-by-constant.h"
#include "src/bootstrapper.h"
#include "src/callable.h"
#include "src/code-factory.h"
#include "src/code-stubs.h"
#include "src/counters.h"
#include "src/debug/debug.h"
#include "src/external-reference-table.h"
#include "src/frames-inl.h"
#include "src/macro-assembler.h"
#include "src/register-configuration.h"
#include "src/runtime/runtime.h"
#include "src/snapshot/embedded-data.h"
#include "src/snapshot/snapshot.h"
#include "src/wasm/wasm-code-manager.h"

#include "src/ppc/macro-assembler-ppc.h"

namespace v8 {
namespace internal {

MacroAssembler::MacroAssembler(Isolate* isolate,
                               const AssemblerOptions& options, void* buffer,
                               int size, CodeObjectRequired create_code_object)
    : TurboAssembler(isolate, options, buffer, size, create_code_object) {
  if (create_code_object == CodeObjectRequired::kYes) {
    // The self-reference marker lets the serializer distinguish references to
    // the code object under construction from references to other code.
    code_object_ = Handle<HeapObject>::New(
        *isolate->factory()->NewSelfReferenceMarker(), isolate);
  }
}
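// Helpers for saving and restoring the JS caller-saved registers around a
// call. Up to three registers can be excluded from the set (e.g. because
// they hold live values); each helper returns the number of stack bytes used.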
int TurboAssembler::RequiredStackSizeForCallerSaved(SaveFPRegsMode fp_mode,
                                                    Register exclusion1,
                                                    Register exclusion2,
                                                    Register exclusion3) const {
  int bytes = 0;
  RegList exclusions = 0;
  if (exclusion1 != no_reg) {
    exclusions |= exclusion1.bit();
    if (exclusion2 != no_reg) {
      exclusions |= exclusion2.bit();
      if (exclusion3 != no_reg) {
        exclusions |= exclusion3.bit();
      }
    }
  }

  RegList list = kJSCallerSaved & ~exclusions;
  bytes += NumRegs(list) * kPointerSize;

  if (fp_mode == kSaveFPRegs) {
    bytes += kNumCallerSavedDoubles * kDoubleSize;
  }

  return bytes;
}

int TurboAssembler::PushCallerSaved(SaveFPRegsMode fp_mode, Register exclusion1,
                                    Register exclusion2, Register exclusion3) {
  int bytes = 0;
  RegList exclusions = 0;
  if (exclusion1 != no_reg) {
    exclusions |= exclusion1.bit();
    if (exclusion2 != no_reg) {
      exclusions |= exclusion2.bit();
      if (exclusion3 != no_reg) {
        exclusions |= exclusion3.bit();
      }
    }
  }

  RegList list = kJSCallerSaved & ~exclusions;
  MultiPush(list);
  bytes += NumRegs(list) * kPointerSize;

  if (fp_mode == kSaveFPRegs) {
    MultiPushDoubles(kCallerSavedDoubles);
    bytes += kNumCallerSavedDoubles * kDoubleSize;
  }

  return bytes;
}

int TurboAssembler::PopCallerSaved(SaveFPRegsMode fp_mode, Register exclusion1,
                                   Register exclusion2, Register exclusion3) {
  int bytes = 0;
  if (fp_mode == kSaveFPRegs) {
    MultiPopDoubles(kCallerSavedDoubles);
    bytes += kNumCallerSavedDoubles * kDoubleSize;
  }

  RegList exclusions = 0;
  if (exclusion1 != no_reg) {
    exclusions |= exclusion1.bit();
    if (exclusion2 != no_reg) {
      exclusions |= exclusion2.bit();
      if (exclusion3 != no_reg) {
        exclusions |= exclusion3.bit();
      }
    }
  }

  RegList list = kJSCallerSaved & ~exclusions;
  MultiPop(list);
  bytes += NumRegs(list) * kPointerSize;

  return bytes;
}
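// Control transfer. PPC has no long pc-relative branch, so far jumps and
// calls load the target into ip and branch through the count register (CTR).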
void TurboAssembler::Jump(Register target) {
  mtctr(target);
  bctr();
}

void TurboAssembler::LoadFromConstantsTable(Register destination,
                                            int constant_index) {
  DCHECK(RootsTable::IsImmortalImmovable(RootIndex::kBuiltinsConstantsTable));

  const uint32_t offset =
      FixedArray::kHeaderSize + constant_index * kPointerSize - kHeapObjectTag;

  CHECK(is_uint19(offset));
  DCHECK_NE(destination, r0);
  LoadRoot(destination, RootIndex::kBuiltinsConstantsTable);
  LoadP(destination, MemOperand(destination, offset), r0);
}

void TurboAssembler::LoadRootRelative(Register destination, int32_t offset) {
  LoadP(destination, MemOperand(kRootRegister, offset), r0);
}

void TurboAssembler::LoadRootRegisterOffset(Register destination,
                                            intptr_t offset) {
  if (offset == 0) {
    mr(destination, kRootRegister);
  } else {
    addi(destination, kRootRegister, Operand(offset));
  }
}
void MacroAssembler::JumpToJSEntry(Register target) {
  Move(ip, target);
  Jump(ip);
}

void TurboAssembler::Jump(intptr_t target, RelocInfo::Mode rmode,
                          Condition cond, CRegister cr) {
  Label skip;

  if (cond != al) b(NegateCondition(cond), &skip, cr);

  DCHECK(rmode == RelocInfo::CODE_TARGET || rmode == RelocInfo::RUNTIME_ENTRY);

  mov(ip, Operand(target, rmode));
  mtctr(ip);
  bctr();

  bind(&skip);
}

void TurboAssembler::Jump(Address target, RelocInfo::Mode rmode, Condition cond,
                          CRegister cr) {
  DCHECK(!RelocInfo::IsCodeTarget(rmode));
  Jump(static_cast<intptr_t>(target), rmode, cond, cr);
}

void TurboAssembler::Jump(Handle<Code> code, RelocInfo::Mode rmode,
                          Condition cond, CRegister cr) {
  DCHECK(RelocInfo::IsCodeTarget(rmode));

  if (FLAG_embedded_builtins) {
    if (root_array_available_ && options().isolate_independent_code) {
      Register scratch = ip;
      IndirectLoadConstant(scratch, code);
      addi(scratch, scratch, Operand(Code::kHeaderSize - kHeapObjectTag));
      Label skip;
      if (cond != al) b(NegateCondition(cond), &skip, cr);
      Jump(scratch);
      bind(&skip);
      return;
    } else if (options().inline_offheap_trampolines) {
      int builtin_index = Builtins::kNoBuiltinId;
      if (isolate()->builtins()->IsBuiltinHandle(code, &builtin_index) &&
          Builtins::IsIsolateIndependent(builtin_index)) {
        // Inline the trampoline to the off-heap builtin.
        RecordCommentForOffHeapTrampoline(builtin_index);
        EmbeddedData d = EmbeddedData::FromBlob();
        Address entry = d.InstructionStartOfBuiltin(builtin_index);
        mov(ip, Operand(entry, RelocInfo::OFF_HEAP_TARGET));
        Label skip;
        if (cond != al) b(NegateCondition(cond), &skip, cr);
        Jump(ip);
        bind(&skip);
        return;
      }
    }
  }
  Jump(static_cast<intptr_t>(code.address()), rmode, cond, cr);
}
void TurboAssembler::Call(Register target) {
  BlockTrampolinePoolScope block_trampoline_pool(this);
  // Branch via the count register, setting the link register for the return.
  mtctr(target);
  bctrl();
}

void MacroAssembler::CallJSEntry(Register target) {
  Call(target);
}

int MacroAssembler::CallSizeNotPredictableCodeSize(Address target,
                                                   RelocInfo::Mode rmode,
                                                   Condition cond) {
  return (2 + kMovInstructionsNoConstantPool) * kInstrSize;
}

void TurboAssembler::Call(Address target, RelocInfo::Mode rmode,
                          Condition cond) {
  BlockTrampolinePoolScope block_trampoline_pool(this);
  DCHECK(cond == al);

  mov(ip, Operand(target, rmode));
  mtctr(ip);
  bctrl();
}

void TurboAssembler::Call(Handle<Code> code, RelocInfo::Mode rmode,
                          Condition cond) {
  BlockTrampolinePoolScope block_trampoline_pool(this);
  DCHECK(RelocInfo::IsCodeTarget(rmode));

  if (FLAG_embedded_builtins) {
    if (root_array_available_ && options().isolate_independent_code) {
      IndirectLoadConstant(ip, code);
      addi(ip, ip, Operand(Code::kHeaderSize - kHeapObjectTag));
      Label skip;
      if (cond != al) b(NegateCondition(cond), &skip);
      Call(ip);
      bind(&skip);
      return;
    } else if (options().inline_offheap_trampolines) {
      int builtin_index = Builtins::kNoBuiltinId;
      if (isolate()->builtins()->IsBuiltinHandle(code, &builtin_index) &&
          Builtins::IsIsolateIndependent(builtin_index)) {
        // Inline the trampoline to the off-heap builtin.
        RecordCommentForOffHeapTrampoline(builtin_index);
        DCHECK(Builtins::IsBuiltinId(builtin_index));
        EmbeddedData d = EmbeddedData::FromBlob();
        Address entry = d.InstructionStartOfBuiltin(builtin_index);
        mov(ip, Operand(entry, RelocInfo::OFF_HEAP_TARGET));
        Label skip;
        if (cond != al) b(NegateCondition(cond), &skip);
        Call(ip);
        bind(&skip);
        return;
      }
    }
  }
  Call(code.address(), rmode, cond);
}
void TurboAssembler::Drop(int count) {
  if (count > 0) {
    Add(sp, sp, count * kPointerSize, r0);
  }
}

void TurboAssembler::Drop(Register count, Register scratch) {
  ShiftLeftImm(scratch, count, Operand(kPointerSizeLog2));
  add(sp, sp, scratch);
}

void TurboAssembler::Call(Label* target) { b(target, SetLK); }

void TurboAssembler::Push(Handle<HeapObject> handle) {
  mov(r0, Operand(handle));
  push(r0);
}

void TurboAssembler::Push(Smi smi) {
  mov(r0, Operand(smi));
  push(r0);
}
void TurboAssembler::Move(Register dst, Handle<HeapObject> value) {
  if (FLAG_embedded_builtins) {
    if (root_array_available_ && options().isolate_independent_code) {
      IndirectLoadConstant(dst, value);
      return;
    }
  }
  mov(dst, Operand(value));
}

void TurboAssembler::Move(Register dst, ExternalReference reference) {
  if (FLAG_embedded_builtins) {
    if (root_array_available_ && options().isolate_independent_code) {
      IndirectLoadExternalReference(dst, reference);
      return;
    }
  }
  mov(dst, Operand(reference));
}

void TurboAssembler::Move(Register dst, Register src, Condition cond) {
  DCHECK(cond == al);
  if (dst != src) {
    mr(dst, src);
  }
}

void TurboAssembler::Move(DoubleRegister dst, DoubleRegister src) {
  if (dst != src) {
    fmr(dst, src);
  }
}
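// MultiPush/MultiPop store and load the registers named in a RegList bit mask
// relative to 'location', with the highest-numbered register at the highest
// address.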
void TurboAssembler::MultiPush(RegList regs, Register location) {
  int16_t num_to_push = base::bits::CountPopulation(regs);
  int16_t stack_offset = num_to_push * kPointerSize;

  subi(location, location, Operand(stack_offset));
  for (int16_t i = Register::kNumRegisters - 1; i >= 0; i--) {
    if ((regs & (1 << i)) != 0) {
      stack_offset -= kPointerSize;
      StoreP(ToRegister(i), MemOperand(location, stack_offset));
    }
  }
}

void TurboAssembler::MultiPop(RegList regs, Register location) {
  int16_t stack_offset = 0;

  for (int16_t i = 0; i < Register::kNumRegisters; i++) {
    if ((regs & (1 << i)) != 0) {
      LoadP(ToRegister(i), MemOperand(location, stack_offset));
      stack_offset += kPointerSize;
    }
  }
  addi(location, location, Operand(stack_offset));
}

void TurboAssembler::MultiPushDoubles(RegList dregs, Register location) {
  int16_t num_to_push = base::bits::CountPopulation(dregs);
  int16_t stack_offset = num_to_push * kDoubleSize;

  subi(location, location, Operand(stack_offset));
  for (int16_t i = DoubleRegister::kNumRegisters - 1; i >= 0; i--) {
    if ((dregs & (1 << i)) != 0) {
      DoubleRegister dreg = DoubleRegister::from_code(i);
      stack_offset -= kDoubleSize;
      stfd(dreg, MemOperand(location, stack_offset));
    }
  }
}

void TurboAssembler::MultiPopDoubles(RegList dregs, Register location) {
  int16_t stack_offset = 0;

  for (int16_t i = 0; i < DoubleRegister::kNumRegisters; i++) {
    if ((dregs & (1 << i)) != 0) {
      DoubleRegister dreg = DoubleRegister::from_code(i);
      lfd(dreg, MemOperand(location, stack_offset));
      stack_offset += kDoubleSize;
    }
  }
  addi(location, location, Operand(stack_offset));
}
void TurboAssembler::LoadRoot(Register destination, RootIndex index,
                              Condition cond) {
  DCHECK(cond == al);
  LoadP(destination,
        MemOperand(kRootRegister, RootRegisterOffsetForRootIndex(index)), r0);
}
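// Write-barrier support. RecordWriteField computes the address of a tagged
// field and delegates to RecordWrite, which skips the barrier for Smi values
// and uninteresting pages before calling the record-write stub.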
void MacroAssembler::RecordWriteField(Register object, int offset,
                                      Register value, Register dst,
                                      LinkRegisterStatus lr_status,
                                      SaveFPRegsMode save_fp,
                                      RememberedSetAction remembered_set_action,
                                      SmiCheck smi_check) {
  // First, check if a write barrier is even needed. The tests below
  // catch stores of Smis.
  Label done;

  // Skip the barrier if writing a smi.
  if (smi_check == INLINE_SMI_CHECK) {
    JumpIfSmi(value, &done);
  }

  // Although the object register is tagged, the offset is relative to the
  // start of the object, so the offset must be a multiple of kPointerSize.
  DCHECK(IsAligned(offset, kPointerSize));

  Add(dst, object, offset - kHeapObjectTag, r0);
  if (emit_debug_code()) {
    Label ok;
    andi(r0, dst, Operand(kPointerSize - 1));
    beq(&ok, cr0);
    stop("Unaligned cell in write barrier");
    bind(&ok);
  }

  RecordWrite(object, dst, value, lr_status, save_fp, remembered_set_action,
              OMIT_SMI_CHECK);

  bind(&done);

  // Clobber clobbered input registers when running with the debug-code flag
  // turned on to provoke errors.
  if (emit_debug_code()) {
    mov(value, Operand(bit_cast<intptr_t>(kZapValue + 4)));
    mov(dst, Operand(bit_cast<intptr_t>(kZapValue + 8)));
  }
}
void TurboAssembler::SaveRegisters(RegList registers) {
  DCHECK_GT(NumRegs(registers), 0);
  RegList regs = 0;
  for (int i = 0; i < Register::kNumRegisters; ++i) {
    if ((registers >> i) & 1u) {
      regs |= Register::from_code(i).bit();
    }
  }

  MultiPush(regs);
}

void TurboAssembler::RestoreRegisters(RegList registers) {
  DCHECK_GT(NumRegs(registers), 0);
  RegList regs = 0;
  for (int i = 0; i < Register::kNumRegisters; ++i) {
    if ((registers >> i) & 1u) {
      regs |= Register::from_code(i).bit();
    }
  }

  MultiPop(regs);
}
void TurboAssembler::CallRecordWriteStub(
    Register object, Register address,
    RememberedSetAction remembered_set_action, SaveFPRegsMode fp_mode) {
  CallRecordWriteStub(
      object, address, remembered_set_action, fp_mode,
      isolate()->builtins()->builtin_handle(Builtins::kRecordWrite),
      kNullAddress);
}

void TurboAssembler::CallRecordWriteStub(
    Register object, Register address,
    RememberedSetAction remembered_set_action, SaveFPRegsMode fp_mode,
    Address wasm_target) {
  CallRecordWriteStub(object, address, remembered_set_action, fp_mode,
                      Handle<Code>::null(), wasm_target);
}

void TurboAssembler::CallRecordWriteStub(
    Register object, Register address,
    RememberedSetAction remembered_set_action, SaveFPRegsMode fp_mode,
    Handle<Code> code_target, Address wasm_target) {
  DCHECK_NE(code_target.is_null(), wasm_target == kNullAddress);

  RecordWriteDescriptor descriptor;
  RegList registers = descriptor.allocatable_registers();

  SaveRegisters(registers);

  Register object_parameter(
      descriptor.GetRegisterParameter(RecordWriteDescriptor::kObject));
  Register slot_parameter(
      descriptor.GetRegisterParameter(RecordWriteDescriptor::kSlot));
  Register remembered_set_parameter(
      descriptor.GetRegisterParameter(RecordWriteDescriptor::kRememberedSet));
  Register fp_mode_parameter(
      descriptor.GetRegisterParameter(RecordWriteDescriptor::kFPMode));

  // Move the object and slot address into the parameter registers via the
  // stack, since the source and destination register sets may overlap.
  push(object);
  push(address);

  pop(slot_parameter);
  pop(object_parameter);

  Move(remembered_set_parameter, Smi::FromEnum(remembered_set_action));
  Move(fp_mode_parameter, Smi::FromEnum(fp_mode));
  if (code_target.is_null()) {
    Call(wasm_target, RelocInfo::WASM_STUB_CALL);
  } else {
    Call(code_target, RelocInfo::CODE_TARGET);
  }

  RestoreRegisters(registers);
}
void MacroAssembler::RecordWrite(Register object, Register address,
                                 Register value, LinkRegisterStatus lr_status,
                                 SaveFPRegsMode fp_mode,
                                 RememberedSetAction remembered_set_action,
                                 SmiCheck smi_check) {
  DCHECK(object != value);
  if (emit_debug_code()) {
    LoadP(r0, MemOperand(address));
    cmp(r0, value);
    Check(eq, AbortReason::kWrongAddressOrValuePassedToRecordWrite);
  }

  if (remembered_set_action == OMIT_REMEMBERED_SET &&
      !FLAG_incremental_marking) {
    return;
  }

  // First, check if a write barrier is even needed. The tests below
  // catch stores of smis and stores into the young generation.
  Label done;

  if (smi_check == INLINE_SMI_CHECK) {
    JumpIfSmi(value, &done);
  }

  CheckPageFlag(value,
                value,  // Used as scratch.
                MemoryChunk::kPointersToHereAreInterestingMask, eq, &done);
  CheckPageFlag(object,
                value,  // Used as scratch.
                MemoryChunk::kPointersFromHereAreInterestingMask, eq, &done);

  // Record the actual write.
  if (lr_status == kLRHasNotBeenSaved) {
    mflr(r0);
    push(r0);
  }
  CallRecordWriteStub(object, address, remembered_set_action, fp_mode);
  if (lr_status == kLRHasNotBeenSaved) {
    pop(r0);
    mtlr(r0);
  }

  bind(&done);

  // Count the number of write barriers in generated code.
  isolate()->counters()->write_barriers_static()->Increment();
  IncrementCounter(isolate()->counters()->write_barriers_dynamic(), 1, ip,
                   value);

  // Clobber clobbered registers when running with the debug-code flag
  // turned on to provoke errors.
  if (emit_debug_code()) {
    mov(address, Operand(bit_cast<intptr_t>(kZapValue + 12)));
    mov(value, Operand(bit_cast<intptr_t>(kZapValue + 16)));
  }
}
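// Frame construction. PushCommonFrame saves lr (via r0), fp and, when
// embedded constant pools are enabled, the constant pool pointer, plus an
// optional marker, then points fp at the saved-fp slot.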
void TurboAssembler::PushCommonFrame(Register marker_reg) {
  int fp_delta = 0;
  mflr(r0);
  if (FLAG_enable_embedded_constant_pool) {
    if (marker_reg.is_valid()) {
      Push(r0, fp, kConstantPoolRegister, marker_reg);
      fp_delta = 2;
    } else {
      Push(r0, fp, kConstantPoolRegister);
      fp_delta = 1;
    }
  } else {
    if (marker_reg.is_valid()) {
      Push(r0, fp, marker_reg);
      fp_delta = 1;
    } else {
      Push(r0, fp);
      fp_delta = 0;
    }
  }
  addi(fp, sp, Operand(fp_delta * kPointerSize));
}

void TurboAssembler::PushStandardFrame(Register function_reg) {
  int fp_delta = 0;
  mflr(r0);
  if (FLAG_enable_embedded_constant_pool) {
    if (function_reg.is_valid()) {
      Push(r0, fp, kConstantPoolRegister, cp, function_reg);
      fp_delta = 3;
    } else {
      Push(r0, fp, kConstantPoolRegister, cp);
      fp_delta = 2;
    }
  } else {
    if (function_reg.is_valid()) {
      Push(r0, fp, cp, function_reg);
      fp_delta = 2;
    } else {
      Push(r0, fp, cp);
      fp_delta = 1;
    }
  }
  addi(fp, sp, Operand(fp_delta * kPointerSize));
}

void TurboAssembler::RestoreFrameStateForTailCall() {
  if (FLAG_enable_embedded_constant_pool) {
    LoadP(kConstantPoolRegister,
          MemOperand(fp, StandardFrameConstants::kConstantPoolOffset));
    set_constant_pool_available(false);
  }
  LoadP(r0, MemOperand(fp, StandardFrameConstants::kCallerPCOffset));
  LoadP(fp, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
  mtlr(r0);
}
void MacroAssembler::PushSafepointRegisters() {
  // Safepoints expect a block of kNumSafepointRegisters values on the stack,
  // so adjust the stack for unsaved registers.
  const int num_unsaved = kNumSafepointRegisters - kNumSafepointSavedRegisters;
  DCHECK_GE(num_unsaved, 0);
  if (num_unsaved > 0) {
    subi(sp, sp, Operand(num_unsaved * kPointerSize));
  }
  MultiPush(kSafepointSavedRegisters);
}

void MacroAssembler::PopSafepointRegisters() {
  const int num_unsaved = kNumSafepointRegisters - kNumSafepointSavedRegisters;
  MultiPop(kSafepointSavedRegisters);
  if (num_unsaved > 0) {
    addi(sp, sp, Operand(num_unsaved * kPointerSize));
  }
}

int MacroAssembler::SafepointRegisterStackIndex(int reg_code) {
  // The registers are pushed starting with the highest encoding, which means
  // that the lowest encodings are closest to the stack pointer.
  RegList regs = kSafepointSavedRegisters;
  int index = 0;

  DCHECK(reg_code >= 0 && reg_code < kNumRegisters);

  for (int16_t i = 0; i < reg_code; i++) {
    if ((regs & (1 << i)) != 0) {
      index++;
    }
  }

  return index;
}
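// Subtracting +0.0 leaves ordinary values (including -0.0) unchanged but
// quiets a signalling NaN, which is what canonicalization requires.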
void TurboAssembler::CanonicalizeNaN(const DoubleRegister dst,
                                     const DoubleRegister src) {
  fsub(dst, src, kDoubleRegZero);
}

void TurboAssembler::ConvertIntToDouble(Register src, DoubleRegister dst) {
  MovIntToDouble(dst, src, r0);
  fcfid(dst, dst);
}

void TurboAssembler::ConvertUnsignedIntToDouble(Register src,
                                                DoubleRegister dst) {
  MovUnsignedIntToDouble(dst, src, r0);
  fcfid(dst, dst);
}

void TurboAssembler::ConvertIntToFloat(Register src, DoubleRegister dst) {
  MovIntToDouble(dst, src, r0);
  fcfids(dst, dst);
}

void TurboAssembler::ConvertUnsignedIntToFloat(Register src,
                                               DoubleRegister dst) {
  MovUnsignedIntToDouble(dst, src, r0);
  fcfids(dst, dst);
}
#if V8_TARGET_ARCH_PPC64
void TurboAssembler::ConvertInt64ToDouble(Register src,
                                          DoubleRegister double_dst) {
  MovInt64ToDouble(double_dst, src);
  fcfid(double_dst, double_dst);
}

void TurboAssembler::ConvertUnsignedInt64ToFloat(Register src,
                                                 DoubleRegister double_dst) {
  MovInt64ToDouble(double_dst, src);
  fcfidus(double_dst, double_dst);
}

void TurboAssembler::ConvertUnsignedInt64ToDouble(Register src,
                                                  DoubleRegister double_dst) {
  MovInt64ToDouble(double_dst, src);
  fcfidu(double_dst, double_dst);
}

void TurboAssembler::ConvertInt64ToFloat(Register src,
                                         DoubleRegister double_dst) {
  MovInt64ToDouble(double_dst, src);
  fcfids(double_dst, double_dst);
}
#endif
void TurboAssembler::ConvertDoubleToInt64(const DoubleRegister double_input,
#if !V8_TARGET_ARCH_PPC64
                                          const Register dst_hi,
#endif
                                          const Register dst,
                                          const DoubleRegister double_dst,
                                          FPRoundingMode rounding_mode) {
  if (rounding_mode == kRoundToZero) {
    fctidz(double_dst, double_input);
  } else {
    SetRoundingMode(rounding_mode);
    fctid(double_dst, double_input);
    ResetRoundingMode();
  }

  MovDoubleToInt64(
#if !V8_TARGET_ARCH_PPC64
      dst_hi,
#endif
      dst, double_dst);
}
#if V8_TARGET_ARCH_PPC64
void TurboAssembler::ConvertDoubleToUnsignedInt64(
    const DoubleRegister double_input, const Register dst,
    const DoubleRegister double_dst, FPRoundingMode rounding_mode) {
  if (rounding_mode == kRoundToZero) {
    fctiduz(double_dst, double_input);
  } else {
    SetRoundingMode(rounding_mode);
    fctidu(double_dst, double_input);
    ResetRoundingMode();
  }

  MovDoubleToInt64(dst, double_dst);
}
#endif
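// On 32-bit PPC a 64-bit value lives in a (low, high) register pair. The
// pair-shift helpers below splice the halves together with slw/srw/rlwimi and
// handle shift amounts of 0, exactly 32, and more than 32 as separate cases.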
#if !V8_TARGET_ARCH_PPC64
void TurboAssembler::ShiftLeftPair(Register dst_low, Register dst_high,
                                   Register src_low, Register src_high,
                                   Register scratch, Register shift) {
  DCHECK(!AreAliased(dst_low, src_high));
  DCHECK(!AreAliased(dst_high, src_low));
  DCHECK(!AreAliased(dst_low, dst_high, shift));
  Label less_than_32;
  Label done;
  cmpi(shift, Operand(32));
  blt(&less_than_32);
  // If shift >= 32
  andi(scratch, shift, Operand(0x1F));
  slw(dst_high, src_low, scratch);
  li(dst_low, Operand::Zero());
  b(&done);
  bind(&less_than_32);
  // If shift < 32
  subfic(scratch, shift, Operand(32));
  slw(dst_high, src_high, shift);
  srw(scratch, src_low, scratch);
  orx(dst_high, dst_high, scratch);
  slw(dst_low, src_low, shift);
  bind(&done);
}
void TurboAssembler::ShiftLeftPair(Register dst_low, Register dst_high,
                                   Register src_low, Register src_high,
                                   uint32_t shift) {
  DCHECK(!AreAliased(dst_low, src_high));
  DCHECK(!AreAliased(dst_high, src_low));
  if (shift == 32) {
    Move(dst_high, src_low);
    li(dst_low, Operand::Zero());
  } else if (shift > 32) {
    shift &= 0x1F;
    slwi(dst_high, src_low, Operand(shift));
    li(dst_low, Operand::Zero());
  } else if (shift == 0) {
    Move(dst_low, src_low);
    Move(dst_high, src_high);
  } else {
    slwi(dst_high, src_high, Operand(shift));
    rlwimi(dst_high, src_low, shift, 32 - shift, 31);
    slwi(dst_low, src_low, Operand(shift));
  }
}
void TurboAssembler::ShiftRightPair(Register dst_low, Register dst_high,
                                    Register src_low, Register src_high,
                                    Register scratch, Register shift) {
  DCHECK(!AreAliased(dst_low, src_high));
  DCHECK(!AreAliased(dst_high, src_low));
  DCHECK(!AreAliased(dst_low, dst_high, shift));
  Label less_than_32;
  Label done;
  cmpi(shift, Operand(32));
  blt(&less_than_32);
  // If shift >= 32
  andi(scratch, shift, Operand(0x1F));
  srw(dst_low, src_high, scratch);
  li(dst_high, Operand::Zero());
  b(&done);
  bind(&less_than_32);
  // If shift < 32
  subfic(scratch, shift, Operand(32));
  srw(dst_low, src_low, shift);
  slw(scratch, src_high, scratch);
  orx(dst_low, dst_low, scratch);
  srw(dst_high, src_high, shift);
  bind(&done);
}
void TurboAssembler::ShiftRightPair(Register dst_low, Register dst_high,
                                    Register src_low, Register src_high,
                                    uint32_t shift) {
  DCHECK(!AreAliased(dst_low, src_high));
  DCHECK(!AreAliased(dst_high, src_low));
  if (shift == 32) {
    Move(dst_low, src_high);
    li(dst_high, Operand::Zero());
  } else if (shift > 32) {
    shift &= 0x1F;
    srwi(dst_low, src_high, Operand(shift));
    li(dst_high, Operand::Zero());
  } else if (shift == 0) {
    Move(dst_low, src_low);
    Move(dst_high, src_high);
  } else {
    srwi(dst_low, src_low, Operand(shift));
    rlwimi(dst_low, src_high, 32 - shift, 0, shift - 1);
    srwi(dst_high, src_high, Operand(shift));
  }
}
void TurboAssembler::ShiftRightAlgPair(Register dst_low, Register dst_high,
                                       Register src_low, Register src_high,
                                       Register scratch, Register shift) {
  DCHECK(!AreAliased(dst_low, src_high, shift));
  DCHECK(!AreAliased(dst_high, src_low, shift));
  Label less_than_32;
  Label done;
  cmpi(shift, Operand(32));
  blt(&less_than_32);
  // If shift >= 32
  andi(scratch, shift, Operand(0x1F));
  sraw(dst_low, src_high, scratch);
  srawi(dst_high, src_high, 31);
  b(&done);
  bind(&less_than_32);
  // If shift < 32
  subfic(scratch, shift, Operand(32));
  srw(dst_low, src_low, shift);
  slw(scratch, src_high, scratch);
  orx(dst_low, dst_low, scratch);
  sraw(dst_high, src_high, shift);
  bind(&done);
}
void TurboAssembler::ShiftRightAlgPair(Register dst_low, Register dst_high,
                                       Register src_low, Register src_high,
                                       uint32_t shift) {
  DCHECK(!AreAliased(dst_low, src_high));
  DCHECK(!AreAliased(dst_high, src_low));
  if (shift == 32) {
    Move(dst_low, src_high);
    srawi(dst_high, src_high, 31);
  } else if (shift > 32) {
    shift &= 0x1F;
    srawi(dst_low, src_high, shift);
    srawi(dst_high, src_high, 31);
  } else if (shift == 0) {
    Move(dst_low, src_low);
    Move(dst_high, src_high);
  } else {
    srwi(dst_low, src_low, Operand(shift));
    rlwimi(dst_low, src_high, 32 - shift, 0, shift - 1);
    srawi(dst_high, src_high, shift);
  }
}
#endif
void TurboAssembler::LoadConstantPoolPointerRegisterFromCodeTargetAddress(
    Register code_target_address) {
  lwz(kConstantPoolRegister,
      MemOperand(code_target_address,
                 Code::kConstantPoolOffset - Code::kHeaderSize));
  add(kConstantPoolRegister, kConstantPoolRegister, code_target_address);
}
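// LoadPC materializes the current program counter: branch-and-link to the
// next instruction deposits its address in the link register, which is then
// copied out.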
void TurboAssembler::LoadPC(Register dst) {
  b(4, SetLK);
  mflr(dst);
}

void TurboAssembler::ComputeCodeStartAddress(Register dst) {
  mflr(r0);
  LoadPC(dst);
  subi(dst, dst, Operand(pc_offset() - kInstrSize));
  mtlr(r0);
}

void TurboAssembler::LoadConstantPoolPointerRegister() {
  LoadPC(kConstantPoolRegister);
  int32_t delta = -pc_offset() + 4;
  add_label_offset(kConstantPoolRegister, kConstantPoolRegister,
                   ConstantPoolPosition(), delta);
}
void TurboAssembler::StubPrologue(StackFrame::Type type) {
  {
    ConstantPoolUnavailableScope constant_pool_unavailable(this);
    mov(r11, Operand(StackFrame::TypeToMarker(type)));
    PushCommonFrame(r11);
  }
  if (FLAG_enable_embedded_constant_pool) {
    LoadConstantPoolPointerRegister();
    set_constant_pool_available(true);
  }
}

void TurboAssembler::Prologue() {
  PushStandardFrame(r4);
  if (FLAG_enable_embedded_constant_pool) {
    // The base contains the prologue address.
    LoadConstantPoolPointerRegister();
    set_constant_pool_available(true);
  }
}

void TurboAssembler::EnterFrame(StackFrame::Type type,
                                bool load_constant_pool_pointer_reg) {
  if (FLAG_enable_embedded_constant_pool && load_constant_pool_pointer_reg) {
    // Push the type marker explicitly below so that the constant pool
    // pointer can be loaded first.
    PushCommonFrame();
    LoadConstantPoolPointerRegister();
    mov(ip, Operand(StackFrame::TypeToMarker(type)));
    push(ip);
  } else {
    mov(ip, Operand(StackFrame::TypeToMarker(type)));
    PushCommonFrame(ip);
  }
}
int TurboAssembler::LeaveFrame(StackFrame::Type type, int stack_adjustment) {
  ConstantPoolUnavailableScope constant_pool_unavailable(this);
  // Drop the execution stack down to the frame pointer and restore
  // the caller's state.
  int frame_ends;
  LoadP(r0, MemOperand(fp, StandardFrameConstants::kCallerPCOffset));
  LoadP(ip, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
  if (FLAG_enable_embedded_constant_pool) {
    LoadP(kConstantPoolRegister,
          MemOperand(fp, StandardFrameConstants::kConstantPoolOffset));
  }
  mtlr(r0);
  frame_ends = pc_offset();
  Add(sp, fp, StandardFrameConstants::kCallerSPOffset + stack_adjustment, r0);
  mr(fp, ip);
  return frame_ends;
}
void MacroAssembler::EnterExitFrame(bool save_doubles, int stack_space,
                                    StackFrame::Type frame_type) {
  DCHECK(frame_type == StackFrame::EXIT ||
         frame_type == StackFrame::BUILTIN_EXIT);
  // Set up the frame structure on the stack.
  DCHECK_EQ(2 * kPointerSize, ExitFrameConstants::kCallerSPDisplacement);
  DCHECK_EQ(1 * kPointerSize, ExitFrameConstants::kCallerPCOffset);
  DCHECK_EQ(0 * kPointerSize, ExitFrameConstants::kCallerFPOffset);
  DCHECK_GT(stack_space, 0);

  mov(ip, Operand(StackFrame::TypeToMarker(frame_type)));
  PushCommonFrame(ip);
  // Reserve room for the saved entry sp and the code object.
  subi(sp, fp, Operand(ExitFrameConstants::kFixedFrameSizeFromFp));

  if (emit_debug_code()) {
    li(r8, Operand::Zero());
    StoreP(r8, MemOperand(fp, ExitFrameConstants::kSPOffset));
  }
  if (FLAG_enable_embedded_constant_pool) {
    StoreP(kConstantPoolRegister,
           MemOperand(fp, ExitFrameConstants::kConstantPoolOffset));
  }
  Move(r8, CodeObject());
  StoreP(r8, MemOperand(fp, ExitFrameConstants::kCodeOffset));

  // Save the frame pointer and the context in top.
  Move(r8, ExternalReference::Create(IsolateAddressId::kCEntryFPAddress,
                                     isolate()));
  StoreP(fp, MemOperand(r8));
  Move(r8,
       ExternalReference::Create(IsolateAddressId::kContextAddress, isolate()));
  StoreP(cp, MemOperand(r8));

  // Optionally save all volatile double registers.
  if (save_doubles) {
    MultiPushDoubles(kCallerSavedDoubles);
  }

  addi(sp, sp, Operand(-stack_space * kPointerSize));

  // Allocate and align the frame, preparing for a call to the runtime
  // function.
  const int frame_alignment = ActivationFrameAlignment();
  if (frame_alignment > kPointerSize) {
    DCHECK(base::bits::IsPowerOfTwo(frame_alignment));
    ClearRightImm(sp, sp, Operand(WhichPowerOf2(frame_alignment)));
  }
  li(r0, Operand::Zero());
  StorePU(r0, MemOperand(sp, -kNumRequiredStackFrameSlots * kPointerSize));

  // Set the exit frame sp value to point just before the return address
  // location.
  addi(r8, sp, Operand((kStackFrameExtraParamSlot + 1) * kPointerSize));
  StoreP(r8, MemOperand(fp, ExitFrameConstants::kSPOffset));
}
int TurboAssembler::ActivationFrameAlignment() {
#if !defined(USE_SIMULATOR)
  // Running on the real platform. Use the alignment as mandated by the local
  // environment.
  return base::OS::ActivationFrameAlignment();
#else
  // If we are using the simulator then we should always align to the expected
  // alignment. Since the simulator is used to generate snapshots we do not
  // know if the target platform will need alignment, so a flag controls this.
  return FLAG_sim_stack_alignment;
#endif
}
void MacroAssembler::LeaveExitFrame(bool save_doubles, Register argument_count,
                                    bool argument_count_is_length) {
  ConstantPoolUnavailableScope constant_pool_unavailable(this);
  // Optionally restore all double registers.
  if (save_doubles) {
    // Calculate the stack location of the saved doubles and restore them.
    const int kNumRegs = kNumCallerSavedDoubles;
    const int offset =
        (ExitFrameConstants::kFixedFrameSizeFromFp + kNumRegs * kDoubleSize);
    addi(r6, fp, Operand(-offset));
    MultiPopDoubles(kCallerSavedDoubles, r6);
  }

  // Clear top frame.
  li(r6, Operand::Zero());
  Move(ip, ExternalReference::Create(IsolateAddressId::kCEntryFPAddress,
                                     isolate()));
  StoreP(r6, MemOperand(ip));

  // Restore the current context from top and clear it in debug mode.
  Move(ip,
       ExternalReference::Create(IsolateAddressId::kContextAddress, isolate()));
  LoadP(cp, MemOperand(ip));

#ifdef DEBUG
  mov(r6, Operand(Context::kInvalidContext));
  Move(ip,
       ExternalReference::Create(IsolateAddressId::kContextAddress, isolate()));
  StoreP(r6, MemOperand(ip));
#endif

  // Tear down the exit frame, pop the arguments, and return.
  LeaveFrame(StackFrame::EXIT);

  if (argument_count.is_valid()) {
    if (!argument_count_is_length) {
      ShiftLeftImm(argument_count, argument_count, Operand(kPointerSizeLog2));
    }
    add(sp, sp, argument_count);
  }
}
void TurboAssembler::MovFromFloatResult(const DoubleRegister dst) {
  Move(dst, d1);
}

void TurboAssembler::MovFromFloatParameter(const DoubleRegister dst) {
  Move(dst, d1);
}
void TurboAssembler::PrepareForTailCall(const ParameterCount& callee_args_count,
                                        Register caller_args_count_reg,
                                        Register scratch0, Register scratch1) {
  if (callee_args_count.is_reg()) {
    DCHECK(!AreAliased(callee_args_count.reg(), caller_args_count_reg, scratch0,
                       scratch1));
  } else {
    DCHECK(!AreAliased(caller_args_count_reg, scratch0, scratch1));
  }

  // Calculate the end of the destination area where we will put the arguments
  // after we drop the current frame. We add kPointerSize to count the
  // receiver argument which is not included in the formal parameters count.
  Register dst_reg = scratch0;
  ShiftLeftImm(dst_reg, caller_args_count_reg, Operand(kPointerSizeLog2));
  add(dst_reg, fp, dst_reg);
  addi(dst_reg, dst_reg,
       Operand(StandardFrameConstants::kCallerSPOffset + kPointerSize));

  Register src_reg = caller_args_count_reg;
  // Calculate the end of the source area. +kPointerSize is for the receiver.
  if (callee_args_count.is_reg()) {
    ShiftLeftImm(src_reg, callee_args_count.reg(), Operand(kPointerSizeLog2));
    add(src_reg, sp, src_reg);
    addi(src_reg, src_reg, Operand(kPointerSize));
  } else {
    Add(src_reg, sp, (callee_args_count.immediate() + 1) * kPointerSize, r0);
  }

  if (FLAG_debug_code) {
    cmpl(src_reg, dst_reg);
    Check(lt, AbortReason::kStackAccessBelowStackPointer);
  }

  // Restore the caller's frame pointer and return address now as they will be
  // overwritten by the copying loop.
  RestoreFrameStateForTailCall();

  // Copy the callee arguments to the caller frame going backwards to avoid
  // corruption: the source and destination areas may overlap. Both src_reg
  // and dst_reg point to the word after the one to copy, so they are
  // pre-decremented in the loop.
  Register tmp_reg = scratch1;
  Label loop;
  if (callee_args_count.is_reg()) {
    addi(tmp_reg, callee_args_count.reg(), Operand(1));  // +1 for receiver
  } else {
    mov(tmp_reg, Operand(callee_args_count.immediate() + 1));
  }
  mtctr(tmp_reg);
  bind(&loop);
  LoadPU(tmp_reg, MemOperand(src_reg, -kPointerSize));
  StorePU(tmp_reg, MemOperand(dst_reg, -kPointerSize));
  bdnz(&loop);

  // Leave the current frame.
  mr(sp, dst_reg);
}
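// Argument adaptation: per the contract with ArgumentsAdaptorTrampoline,
// r3 carries the actual argument count, r5 the expected count, and r4 the
// function being invoked.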
void MacroAssembler::InvokePrologue(const ParameterCount& expected,
                                    const ParameterCount& actual, Label* done,
                                    bool* definitely_mismatches,
                                    InvokeFlag flag) {
  bool definitely_matches = false;
  *definitely_mismatches = false;
  Label regular_invoke;

  if (expected.is_immediate()) {
    DCHECK(actual.is_immediate());
    mov(r3, Operand(actual.immediate()));
    if (expected.immediate() == actual.immediate()) {
      definitely_matches = true;
    } else {
      const int sentinel = SharedFunctionInfo::kDontAdaptArgumentsSentinel;
      if (expected.immediate() == sentinel) {
        // Don't worry about adapting arguments for builtins that don't want
        // that done. Skip the adaption code by making it look like we have a
        // match between the expected and actual number of arguments.
        definitely_matches = true;
      } else {
        *definitely_mismatches = true;
        mov(r5, Operand(expected.immediate()));
      }
    }
  } else {
    if (actual.is_immediate()) {
      mov(r3, Operand(actual.immediate()));
      cmpi(expected.reg(), Operand(actual.immediate()));
      beq(&regular_invoke);
    } else {
      cmp(expected.reg(), actual.reg());
      beq(&regular_invoke);
    }
  }

  if (!definitely_matches) {
    Handle<Code> adaptor = BUILTIN_CODE(isolate(), ArgumentsAdaptorTrampoline);
    if (flag == CALL_FUNCTION) {
      Call(adaptor);
      if (!*definitely_mismatches) {
        b(done);
      }
    } else {
      Jump(adaptor, RelocInfo::CODE_TARGET);
    }
    bind(&regular_invoke);
  }
}
void MacroAssembler::CheckDebugHook(Register fun, Register new_target,
                                    const ParameterCount& expected,
                                    const ParameterCount& actual) {
  Label skip_hook;

  ExternalReference debug_hook_active =
      ExternalReference::debug_hook_on_function_call_address(isolate());
  Move(r7, debug_hook_active);
  LoadByte(r7, MemOperand(r7), r0);
  extsb(r7, r7);
  CmpSmiLiteral(r7, Smi::zero(), r0);
  beq(&skip_hook);

  {
    // Load receiver to pass it later to the DebugOnFunctionCall hook.
    if (actual.is_reg()) {
      mr(r7, actual.reg());
    } else {
      mov(r7, Operand(actual.immediate()));
    }
    ShiftLeftImm(r7, r7, Operand(kPointerSizeLog2));
    LoadPX(r7, MemOperand(sp, r7));
    FrameScope frame(this,
                     has_frame() ? StackFrame::NONE : StackFrame::INTERNAL);
    if (expected.is_reg()) {
      SmiTag(expected.reg());
      Push(expected.reg());
    }
    if (actual.is_reg()) {
      SmiTag(actual.reg());
      Push(actual.reg());
    }
    if (new_target.is_valid()) {
      Push(new_target);
    }
    Push(fun);
    Push(fun);
    Push(r7);
    CallRuntime(Runtime::kDebugOnFunctionCall);
    Pop(fun);
    if (new_target.is_valid()) {
      Pop(new_target);
    }
    if (actual.is_reg()) {
      Pop(actual.reg());
      SmiUntag(actual.reg());
    }
    if (expected.is_reg()) {
      Pop(expected.reg());
      SmiUntag(expected.reg());
    }
  }
  bind(&skip_hook);
}
void MacroAssembler::InvokeFunctionCode(Register function, Register new_target,
                                        const ParameterCount& expected,
                                        const ParameterCount& actual,
                                        InvokeFlag flag) {
  // You can't call a function without a valid frame.
  DCHECK(flag == JUMP_FUNCTION || has_frame());
  DCHECK(function == r4);
  DCHECK_IMPLIES(new_target.is_valid(), new_target == r6);

  // On function call, call into the debugger if necessary.
  CheckDebugHook(function, new_target, expected, actual);

  // Clear the new.target register if not given.
  if (!new_target.is_valid()) {
    LoadRoot(r6, RootIndex::kUndefinedValue);
  }

  Label done;
  bool definitely_mismatches = false;
  InvokePrologue(expected, actual, &done, &definitely_mismatches, flag);
  if (!definitely_mismatches) {
    // We call indirectly through the code field in the function to allow
    // recompilation to take effect without changing any of the call sites.
    Register code = kJavaScriptCallCodeStartRegister;
    LoadP(code, FieldMemOperand(function, JSFunction::kCodeOffset));
    addi(code, code, Operand(Code::kHeaderSize - kHeapObjectTag));
    if (flag == CALL_FUNCTION) {
      CallJSEntry(code);
    } else {
      DCHECK(flag == JUMP_FUNCTION);
      JumpToJSEntry(code);
    }

    // Continue here if InvokePrologue does handle the invocation due to
    // mismatched parameter counts.
    bind(&done);
  }
}
void MacroAssembler::InvokeFunction(Register fun, Register new_target,
                                    const ParameterCount& actual,
                                    InvokeFlag flag) {
  // You can't call a function without a valid frame.
  DCHECK(flag == JUMP_FUNCTION || has_frame());

  // The contract with called JS functions requires that the function is
  // passed in r4.
  DCHECK(fun == r4);

  Register expected_reg = r5;
  Register temp_reg = r7;

  LoadP(temp_reg, FieldMemOperand(r4, JSFunction::kSharedFunctionInfoOffset));
  LoadP(cp, FieldMemOperand(r4, JSFunction::kContextOffset));
  LoadHalfWord(expected_reg,
               FieldMemOperand(
                   temp_reg, SharedFunctionInfo::kFormalParameterCountOffset));

  ParameterCount expected(expected_reg);
  InvokeFunctionCode(fun, new_target, expected, actual, flag);
}

void MacroAssembler::InvokeFunction(Register function,
                                    const ParameterCount& expected,
                                    const ParameterCount& actual,
                                    InvokeFlag flag) {
  // You can't call a function without a valid frame.
  DCHECK(flag == JUMP_FUNCTION || has_frame());

  // The contract with called JS functions requires that the function is
  // passed in r4.
  DCHECK(function == r4);

  // Get the function and set up the context.
  LoadP(cp, FieldMemOperand(r4, JSFunction::kContextOffset));

  InvokeFunctionCode(r4, no_reg, expected, actual, flag);
}
void MacroAssembler::MaybeDropFrames() {
  // Check whether we need to drop frames to restart a function on the stack.
  ExternalReference restart_fp =
      ExternalReference::debug_restart_fp_address(isolate());
  Move(r4, restart_fp);
  LoadP(r4, MemOperand(r4));
  cmpi(r4, Operand::Zero());
  Jump(BUILTIN_CODE(isolate(), FrameDropperTrampoline), RelocInfo::CODE_TARGET,
       ne);
}
void MacroAssembler::PushStackHandler() {
  // Adjust this code if the asserts below don't hold.
  STATIC_ASSERT(StackHandlerConstants::kSize == 2 * kPointerSize);
  STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0 * kPointerSize);

  Push(Smi::zero());  // Padding.

  // Link the current handler as the next handler.
  mov(r8, Operand(ExternalReference::Create(IsolateAddressId::kHandlerAddress,
                                            isolate())));
  LoadP(r0, MemOperand(r8));
  push(r0);

  // Set this new handler as the current one.
  StoreP(sp, MemOperand(r8));
}

void MacroAssembler::PopStackHandler() {
  STATIC_ASSERT(StackHandlerConstants::kSize == 2 * kPointerSize);
  STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0);

  pop(r4);
  mov(ip, Operand(ExternalReference::Create(IsolateAddressId::kHandlerAddress,
                                            isolate())));
  StoreP(r4, MemOperand(ip));

  Drop(1);  // Drop padding.
}
void MacroAssembler::CompareObjectType(Register object, Register map,
                                       Register type_reg, InstanceType type) {
  const Register temp = type_reg == no_reg ? r0 : type_reg;

  LoadP(map, FieldMemOperand(object, HeapObject::kMapOffset));
  CompareInstanceType(map, temp, type);
}

void MacroAssembler::CompareInstanceType(Register map, Register type_reg,
                                         InstanceType type) {
  STATIC_ASSERT(Map::kInstanceTypeOffset < 4096);
  STATIC_ASSERT(LAST_TYPE <= 0xFFFF);
  lhz(type_reg, FieldMemOperand(map, Map::kInstanceTypeOffset));
  cmpi(type_reg, Operand(type));
}

void MacroAssembler::CompareRoot(Register obj, RootIndex index) {
  DCHECK(obj != r0);  // r0 is used as scratch here.
  LoadRoot(r0, index);
  cmp(obj, r0);
}
void TurboAssembler::AddAndCheckForOverflow(Register dst, Register left,
                                            Register right,
                                            Register overflow_dst,
                                            Register scratch) {
  DCHECK(dst != overflow_dst);
  DCHECK(dst != scratch);
  DCHECK(overflow_dst != scratch);
  DCHECK(overflow_dst != left);
  DCHECK(overflow_dst != right);

  bool left_is_right = left == right;
  RCBit xorRC = left_is_right ? SetRC : LeaveRC;

  // C = A + B; C overflows if A/B have the same sign and C has a different
  // sign.
  if (dst == left) {
    mr(scratch, left);  // Preserve left.
    add(dst, left, right);
    xor_(overflow_dst, dst, scratch, xorRC);  // Original left.
    if (!left_is_right) xor_(scratch, dst, right);
  } else if (dst == right) {
    mr(scratch, right);  // Preserve right.
    add(dst, left, right);
    xor_(overflow_dst, dst, left, xorRC);
    if (!left_is_right) xor_(scratch, dst, scratch);  // Original right.
  } else {
    add(dst, left, right);
    xor_(overflow_dst, dst, left, xorRC);
    if (!left_is_right) xor_(scratch, dst, right);
  }
  if (!left_is_right) and_(overflow_dst, scratch, overflow_dst, SetRC);
}

void TurboAssembler::AddAndCheckForOverflow(Register dst, Register left,
                                            intptr_t right,
                                            Register overflow_dst,
                                            Register scratch) {
  Register original_left = left;
  DCHECK(dst != overflow_dst);
  DCHECK(dst != scratch);
  DCHECK(overflow_dst != scratch);
  DCHECK(overflow_dst != left);

  // C = A + B; C overflows if A/B have the same sign and C has a different
  // sign.
  if (dst == left) {
    // Preserve left.
    original_left = overflow_dst;
    mr(original_left, left);
  }
  Add(dst, left, right, scratch);
  xor_(overflow_dst, dst, original_left);
  if (right >= 0) {
    and_(overflow_dst, overflow_dst, dst, SetRC);
  } else {
    andc(overflow_dst, overflow_dst, dst, SetRC);
  }
}

void TurboAssembler::SubAndCheckForOverflow(Register dst, Register left,
                                            Register right,
                                            Register overflow_dst,
                                            Register scratch) {
  DCHECK(dst != overflow_dst);
  DCHECK(dst != scratch);
  DCHECK(overflow_dst != scratch);
  DCHECK(overflow_dst != left);
  DCHECK(overflow_dst != right);

  // C = A - B; C overflows if A/B have different signs and C has a different
  // sign from A.
  if (dst == left) {
    mr(scratch, left);  // Preserve left.
    sub(dst, left, right);
    xor_(overflow_dst, dst, scratch);
    xor_(scratch, scratch, right);
    and_(overflow_dst, overflow_dst, scratch, SetRC);
  } else if (dst == right) {
    mr(scratch, right);  // Preserve right.
    sub(dst, left, right);
    xor_(overflow_dst, dst, left);
    xor_(scratch, left, scratch);
    and_(overflow_dst, overflow_dst, scratch, SetRC);
  } else {
    sub(dst, left, right);
    xor_(overflow_dst, dst, left);
    xor_(scratch, left, right);
    and_(overflow_dst, scratch, overflow_dst, SetRC);
  }
}
void MacroAssembler::CallStub(CodeStub* stub, Condition cond) {
  DCHECK(AllowThisStubCall(stub));
  Call(stub->GetCode(), RelocInfo::CODE_TARGET, cond);
}

void MacroAssembler::TailCallStub(CodeStub* stub, Condition cond) {
  Jump(stub->GetCode(), RelocInfo::CODE_TARGET, cond);
}

bool TurboAssembler::AllowThisStubCall(CodeStub* stub) {
  return has_frame_ || !stub->SometimesSetsUpAFrame();
}
void MacroAssembler::TryDoubleToInt32Exact(Register result,
                                           DoubleRegister double_input,
                                           Register scratch,
                                           DoubleRegister double_scratch) {
  Label done;
  DCHECK(double_input != double_scratch);

  ConvertDoubleToInt64(double_input,
#if !V8_TARGET_ARCH_PPC64
                       scratch,
#endif
                       result, double_scratch);

#if V8_TARGET_ARCH_PPC64
  TestIfInt32(result, r0);
#else
  TestIfInt32(scratch, result, r0);
#endif
  bne(&done);

  // Convert back and compare.
  fcfid(double_scratch, double_scratch);
  fcmpu(double_scratch, double_input);
  bind(&done);
}
void TurboAssembler::TruncateDoubleToI(Isolate* isolate, Zone* zone,
                                       Register result,
                                       DoubleRegister double_input,
                                       StubCallMode stub_mode) {
  Label done;

  TryInlineTruncateDoubleToI(result, double_input, &done);

  // If we fell through then the inline version didn't succeed - call the
  // stub instead.
  mflr(r0);
  push(r0);
  // Put the input on the stack.
  stfdu(double_input, MemOperand(sp, -kDoubleSize));

  if (stub_mode == StubCallMode::kCallWasmRuntimeStub) {
    Call(wasm::WasmCode::kDoubleToI, RelocInfo::WASM_STUB_CALL);
  } else {
    Call(BUILTIN_CODE(isolate, DoubleToI), RelocInfo::CODE_TARGET);
  }

  LoadP(result, MemOperand(sp));
  addi(sp, sp, Operand(kDoubleSize));
  pop(r0);
  mtlr(r0);

  bind(&done);
}

void TurboAssembler::TryInlineTruncateDoubleToI(Register result,
                                                DoubleRegister double_input,
                                                Label* done) {
  DoubleRegister double_scratch = kScratchDoubleReg;
#if !V8_TARGET_ARCH_PPC64
  Register scratch = ip;
#endif

  ConvertDoubleToInt64(double_input,
#if !V8_TARGET_ARCH_PPC64
                       scratch,
#endif
                       result, double_scratch);

  // Test for overflow.
#if V8_TARGET_ARCH_PPC64
  TestIfInt32(result, r0);
#else
  TestIfInt32(scratch, result, r0);
#endif
  beq(done);
}
void TurboAssembler::CallRuntimeWithCEntry(Runtime::FunctionId fid,
                                           Register centry) {
  const Runtime::Function* f = Runtime::FunctionForId(fid);
  // TODO(1236192): Most runtime routines don't need the number of
  // arguments passed in because it is constant. At some point we
  // should remove this need and make the runtime routine entry code
  // smarter.
  mov(r3, Operand(f->nargs));
  Move(r4, ExternalReference::Create(f));
  DCHECK(!AreAliased(centry, r3, r4));
  addi(centry, centry, Operand(Code::kHeaderSize - kHeapObjectTag));
  Call(centry);
}

void MacroAssembler::CallRuntime(const Runtime::Function* f, int num_arguments,
                                 SaveFPRegsMode save_doubles) {
  // All parameters are on the stack. r3 has the return value after the call.

  // If the expected number of arguments of the runtime function is
  // constant, we check that the actual number of arguments matches the
  // expectation.
  CHECK(f->nargs < 0 || f->nargs == num_arguments);

  mov(r3, Operand(num_arguments));
  Move(r4, ExternalReference::Create(f));
#if V8_TARGET_ARCH_PPC64
  Handle<Code> code =
      CodeFactory::CEntry(isolate(), f->result_size, save_doubles);
#else
  Handle<Code> code = CodeFactory::CEntry(isolate(), 1, save_doubles);
#endif
  Call(code, RelocInfo::CODE_TARGET);
}

void MacroAssembler::TailCallRuntime(Runtime::FunctionId fid) {
  const Runtime::Function* function = Runtime::FunctionForId(fid);
  DCHECK_EQ(1, function->result_size);
  if (function->nargs >= 0) {
    mov(r3, Operand(function->nargs));
  }
  JumpToExternalReference(ExternalReference::Create(fid));
}
void MacroAssembler::JumpToExternalReference(const ExternalReference& builtin,
                                             bool builtin_exit_frame) {
  Move(r4, builtin);
  Handle<Code> code = CodeFactory::CEntry(isolate(), 1, kDontSaveFPRegs,
                                          kArgvOnStack, builtin_exit_frame);
  Jump(code, RelocInfo::CODE_TARGET);
}

void MacroAssembler::JumpToInstructionStream(Address entry) {
  mov(kOffHeapTrampolineRegister, Operand(entry, RelocInfo::OFF_HEAP_TARGET));
  Jump(kOffHeapTrampolineRegister);
}
void MacroAssembler::LoadWeakValue(Register out, Register in,
                                   Label* target_if_cleared) {
  cmpi(in, Operand(kClearedWeakHeapObjectLower32));
  beq(target_if_cleared);

  mov(r0, Operand(~kWeakHeapObjectMask));
  and_(out, in, r0);
}
void MacroAssembler::IncrementCounter(StatsCounter* counter, int value,
                                      Register scratch1, Register scratch2) {
  DCHECK_GT(value, 0);
  if (FLAG_native_code_counters && counter->Enabled()) {
    Move(scratch2, ExternalReference::Create(counter));
    lwz(scratch1, MemOperand(scratch2));
    addi(scratch1, scratch1, Operand(value));
    stw(scratch1, MemOperand(scratch2));
  }
}

void MacroAssembler::DecrementCounter(StatsCounter* counter, int value,
                                      Register scratch1, Register scratch2) {
  DCHECK_GT(value, 0);
  if (FLAG_native_code_counters && counter->Enabled()) {
    Move(scratch2, ExternalReference::Create(counter));
    lwz(scratch1, MemOperand(scratch2));
    subi(scratch1, scratch1, Operand(value));
    stw(scratch1, MemOperand(scratch2));
  }
}
void TurboAssembler::Assert(Condition cond, AbortReason reason, CRegister cr) {
  if (emit_debug_code()) Check(cond, reason, cr);
}

void TurboAssembler::Check(Condition cond, AbortReason reason, CRegister cr) {
  Label L;
  b(cond, &L, cr);
  Abort(reason);
  // Control will not return here.
  bind(&L);
}

void TurboAssembler::Abort(AbortReason reason) {
  Label abort_start;
  bind(&abort_start);
  const char* msg = GetAbortReason(reason);
#ifdef DEBUG
  RecordComment("Abort message: ");
  RecordComment(msg);
#endif

  // Avoid emitting a call to the builtin if requested.
  if (trap_on_abort()) {
    stop(msg);
    return;
  }

  if (should_abort_hard()) {
    // We don't care if we constructed a frame. Just pretend we did.
    FrameScope assume_frame(this, StackFrame::NONE);
    mov(r3, Operand(static_cast<int>(reason)));
    PrepareCallCFunction(1, r4);
    CallCFunction(ExternalReference::abort_with_reason(), 1);
    return;
  }

  LoadSmiLiteral(r4, Smi::FromInt(static_cast<int>(reason)));

  // Disable stub call restrictions to always allow calls to abort.
  if (!has_frame_) {
    // We don't actually want to generate a pile of code for this, so just
    // claim there is a stack frame, without generating one.
    FrameScope scope(this, StackFrame::NONE);
    Call(BUILTIN_CODE(isolate(), Abort), RelocInfo::CODE_TARGET);
  } else {
    Call(BUILTIN_CODE(isolate(), Abort), RelocInfo::CODE_TARGET);
  }
  // Control will not return here.
}
void MacroAssembler::LoadNativeContextSlot(int index, Register dst) {
  LoadP(dst, NativeContextMemOperand());
  LoadP(dst, ContextMemOperand(dst, index));
}

void MacroAssembler::UntagAndJumpIfSmi(Register dst, Register src,
                                       Label* smi_case) {
  STATIC_ASSERT(kSmiTag == 0);
  TestBitRange(src, kSmiTagSize - 1, 0, r0);
  SmiUntag(dst, src);
  beq(smi_case, cr0);
}

void MacroAssembler::JumpIfEitherSmi(Register reg1, Register reg2,
                                     Label* on_either_smi) {
  STATIC_ASSERT(kSmiTag == 0);
  JumpIfSmi(reg1, on_either_smi);
  JumpIfSmi(reg2, on_either_smi);
}
void MacroAssembler::AssertNotSmi(Register object) {
  if (emit_debug_code()) {
    STATIC_ASSERT(kSmiTag == 0);
    TestIfSmi(object, r0);
    Check(ne, AbortReason::kOperandIsASmi, cr0);
  }
}

void MacroAssembler::AssertSmi(Register object) {
  if (emit_debug_code()) {
    STATIC_ASSERT(kSmiTag == 0);
    TestIfSmi(object, r0);
    Check(eq, AbortReason::kOperandIsNotASmi, cr0);
  }
}
void MacroAssembler::AssertConstructor(Register object) {
  if (emit_debug_code()) {
    STATIC_ASSERT(kSmiTag == 0);
    TestIfSmi(object, r0);
    Check(ne, AbortReason::kOperandIsASmiAndNotAConstructor, cr0);
    push(object);
    LoadP(object, FieldMemOperand(object, HeapObject::kMapOffset));
    lbz(object, FieldMemOperand(object, Map::kBitFieldOffset));
    andi(object, object, Operand(Map::IsConstructorBit::kMask));
    pop(object);
    Check(ne, AbortReason::kOperandIsNotAConstructor, cr0);
  }
}

void MacroAssembler::AssertFunction(Register object) {
  if (emit_debug_code()) {
    STATIC_ASSERT(kSmiTag == 0);
    TestIfSmi(object, r0);
    Check(ne, AbortReason::kOperandIsASmiAndNotAFunction, cr0);
    push(object);
    CompareObjectType(object, object, object, JS_FUNCTION_TYPE);
    pop(object);
    Check(eq, AbortReason::kOperandIsNotAFunction);
  }
}

void MacroAssembler::AssertBoundFunction(Register object) {
  if (emit_debug_code()) {
    STATIC_ASSERT(kSmiTag == 0);
    TestIfSmi(object, r0);
    Check(ne, AbortReason::kOperandIsASmiAndNotABoundFunction, cr0);
    push(object);
    CompareObjectType(object, object, object, JS_BOUND_FUNCTION_TYPE);
    pop(object);
    Check(eq, AbortReason::kOperandIsNotABoundFunction);
  }
}
void MacroAssembler::AssertGeneratorObject(Register object) {
  if (!emit_debug_code()) return;
  TestIfSmi(object, r0);
  Check(ne, AbortReason::kOperandIsASmiAndNotAGeneratorObject, cr0);

  // Load the map.
  Register map = object;
  push(object);
  LoadP(map, FieldMemOperand(object, HeapObject::kMapOffset));

  // Check if JSGeneratorObject.
  Label do_check;
  Register instance_type = object;
  CompareInstanceType(map, instance_type, JS_GENERATOR_OBJECT_TYPE);
  beq(&do_check);

  // Check if JSAsyncFunctionObject.
  cmpi(instance_type, Operand(JS_ASYNC_FUNCTION_OBJECT_TYPE));
  beq(&do_check);

  // Check if JSAsyncGeneratorObject.
  cmpi(instance_type, Operand(JS_ASYNC_GENERATOR_OBJECT_TYPE));

  bind(&do_check);
  // Restore the generator object to the register and perform the assertion.
  pop(object);
  Check(eq, AbortReason::kOperandIsNotAGeneratorObject);
}
void MacroAssembler::AssertUndefinedOrAllocationSite(Register object,
                                                     Register scratch) {
  if (emit_debug_code()) {
    Label done_checking;
    AssertNotSmi(object);
    CompareRoot(object, RootIndex::kUndefinedValue);
    beq(&done_checking);
    LoadP(scratch, FieldMemOperand(object, HeapObject::kMapOffset));
    CompareInstanceType(scratch, scratch, ALLOCATION_SITE_TYPE);
    Assert(eq, AbortReason::kExpectedUndefinedOrCell);
    bind(&done_checking);
  }
}
static const int kRegisterPassedArguments = 8;

int TurboAssembler::CalculateStackPassedWords(int num_reg_arguments,
                                              int num_double_arguments) {
  int stack_passed_words = 0;
  if (num_double_arguments > DoubleRegister::kNumRegisters) {
    stack_passed_words +=
        2 * (num_double_arguments - DoubleRegister::kNumRegisters);
  }
  // Up to 8 simple arguments are passed in registers r3..r10.
  if (num_reg_arguments > kRegisterPassedArguments) {
    stack_passed_words += num_reg_arguments - kRegisterPassedArguments;
  }
  return stack_passed_words;
}
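// C calls: integer arguments beyond the eight register-passed ones, and any
// excess double arguments, spill to the stack slots counted above; the
// helpers below reserve and align that space per the native ABI.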
void TurboAssembler::PrepareCallCFunction(int num_reg_arguments,
                                          int num_double_arguments,
                                          Register scratch) {
  int frame_alignment = ActivationFrameAlignment();
  int stack_passed_arguments =
      CalculateStackPassedWords(num_reg_arguments, num_double_arguments);
  int stack_space = kNumRequiredStackFrameSlots;

  if (frame_alignment > kPointerSize) {
    // Make the stack end at the alignment and make room for stack arguments,
    // preserving the original value of sp.
    mr(scratch, sp);
    addi(sp, sp, Operand(-(stack_passed_arguments + 1) * kPointerSize));
    DCHECK(base::bits::IsPowerOfTwo(frame_alignment));
    ClearRightImm(sp, sp, Operand(WhichPowerOf2(frame_alignment)));
    StoreP(scratch, MemOperand(sp, stack_passed_arguments * kPointerSize));
  } else {
    // Make room for stack arguments.
    stack_space += stack_passed_arguments;
  }

  // Allocate a frame with the required slots to make the ABI work.
  li(r0, Operand::Zero());
  StorePU(r0, MemOperand(sp, -stack_space * kPointerSize));
}

void TurboAssembler::PrepareCallCFunction(int num_reg_arguments,
                                          Register scratch) {
  PrepareCallCFunction(num_reg_arguments, 0, scratch);
}

void TurboAssembler::MovToFloatParameter(DoubleRegister src) { Move(d1, src); }

void TurboAssembler::MovToFloatResult(DoubleRegister src) { Move(d1, src); }

void TurboAssembler::MovToFloatParameters(DoubleRegister src1,
                                          DoubleRegister src2) {
  if (src2 == d1) {
    DCHECK(src1 != d2);
    Move(d2, src2);
    Move(d1, src1);
  } else {
    Move(d1, src1);
    Move(d2, src2);
  }
}
void TurboAssembler::CallCFunction(ExternalReference function,
                                   int num_reg_arguments,
                                   int num_double_arguments) {
  Move(ip, function);
  CallCFunctionHelper(ip, num_reg_arguments, num_double_arguments);
}

void TurboAssembler::CallCFunction(Register function, int num_reg_arguments,
                                   int num_double_arguments) {
  CallCFunctionHelper(function, num_reg_arguments, num_double_arguments);
}

void TurboAssembler::CallCFunction(ExternalReference function,
                                   int num_arguments) {
  CallCFunction(function, num_arguments, 0);
}

void TurboAssembler::CallCFunction(Register function, int num_arguments) {
  CallCFunction(function, num_arguments, 0);
}

void TurboAssembler::CallCFunctionHelper(Register function,
                                         int num_reg_arguments,
                                         int num_double_arguments) {
  DCHECK_LE(num_reg_arguments + num_double_arguments, kMaxCParameters);
  DCHECK(has_frame());

  // Just call directly. The function called cannot cause a GC, or allow
  // preemption, so the return address in the link register stays correct.
  Register dest = function;
  if (ABI_USES_FUNCTION_DESCRIPTORS) {
    // AIX/PPC64BE Linux uses a function descriptor. When calling C code be
    // aware of this descriptor and pick up the values from it.
    LoadP(ToRegister(ABI_TOC_REGISTER), MemOperand(function, kPointerSize));
    LoadP(ip, MemOperand(function, 0));
    dest = ip;
  } else if (ABI_CALL_VIA_IP) {
    Move(ip, function);
    dest = ip;
  }

  Call(dest);

  // Remove the frame bought in PrepareCallCFunction.
  int stack_passed_arguments =
      CalculateStackPassedWords(num_reg_arguments, num_double_arguments);
  int stack_space = kNumRequiredStackFrameSlots + stack_passed_arguments;
  if (ActivationFrameAlignment() > kPointerSize) {
    LoadP(sp, MemOperand(sp, stack_space * kPointerSize));
  } else {
    addi(sp, sp, Operand(stack_space * kPointerSize));
  }
}
void TurboAssembler::CheckPageFlag(
    Register object,
    Register scratch,  // scratch may be the same register as object
    int mask, Condition cc, Label* condition_met) {
  DCHECK(cc == ne || cc == eq);
  ClearRightImm(scratch, object, Operand(kPageSizeBits));
  LoadP(scratch, MemOperand(scratch, MemoryChunk::kFlagsOffset));

  mov(r0, Operand(mask));
  and_(r0, scratch, r0, SetRC);

  if (cc == ne) {
    bne(condition_met, cr0);
  }
  if (cc == eq) {
    beq(condition_met, cr0);
  }
}
void TurboAssembler::SetRoundingMode(FPRoundingMode RN) { mtfsfi(7, RN); }

void TurboAssembler::ResetRoundingMode() {
  mtfsfi(7, kRoundToNearest);  // Reset (the default is kRoundToNearest).
}

void TurboAssembler::LoadIntLiteral(Register dst, int value) {
  mov(dst, Operand(value));
}

void TurboAssembler::LoadSmiLiteral(Register dst, Smi smi) {
  mov(dst, Operand(smi));
}
void TurboAssembler::LoadDoubleLiteral(DoubleRegister result, Double value,
                                       Register scratch) {
  if (FLAG_enable_embedded_constant_pool && is_constant_pool_available() &&
      !(scratch == r0 && ConstantPoolAccessIsInOverflow())) {
    ConstantPoolEntry::Access access = ConstantPoolAddEntry(value);
    if (access == ConstantPoolEntry::OVERFLOWED) {
      addis(scratch, kConstantPoolRegister, Operand::Zero());
      lfd(result, MemOperand(scratch, 0));
    } else {
      lfd(result, MemOperand(kConstantPoolRegister, 0));
    }
    return;
  }

  // Avoid a gcc strict-aliasing error by using a union cast.
  union {
    uint64_t dval;
#if V8_TARGET_ARCH_PPC64
    intptr_t ival;
#else
    intptr_t ival[2];
#endif
  } litVal;

  litVal.dval = value.AsUint64();

#if V8_TARGET_ARCH_PPC64
  if (CpuFeatures::IsSupported(FPR_GPR_MOV)) {
    mov(scratch, Operand(litVal.ival));
    mtfprd(result, scratch);
    return;
  }
#endif

  addi(sp, sp, Operand(-kDoubleSize));
#if V8_TARGET_ARCH_PPC64
  mov(scratch, Operand(litVal.ival));
  std(scratch, MemOperand(sp));
#else
  LoadIntLiteral(scratch, litVal.ival[0]);
  stw(scratch, MemOperand(sp, 0));
  LoadIntLiteral(scratch, litVal.ival[1]);
  stw(scratch, MemOperand(sp, 4));
#endif
  nop(GROUP_ENDING_NOP);  // LHS/RAW optimization
  lfd(result, MemOperand(sp, 0));
  addi(sp, sp, Operand(kDoubleSize));
}
void TurboAssembler::MovIntToDouble(DoubleRegister dst, Register src,
                                    Register scratch) {
  // Sign-extend src to 64-bit.
#if V8_TARGET_ARCH_PPC64
  if (CpuFeatures::IsSupported(FPR_GPR_MOV)) {
    mtfprwa(dst, src);
    return;
  }
#endif

  DCHECK(src != scratch);
  subi(sp, sp, Operand(kDoubleSize));
#if V8_TARGET_ARCH_PPC64
  extsw(scratch, src);
  std(scratch, MemOperand(sp, 0));
#else
  srawi(scratch, src, 31);
  stw(scratch, MemOperand(sp, Register::kExponentOffset));
  stw(src, MemOperand(sp, Register::kMantissaOffset));
#endif
  nop(GROUP_ENDING_NOP);  // LHS/RAW optimization
  lfd(dst, MemOperand(sp, 0));
  addi(sp, sp, Operand(kDoubleSize));
}

void TurboAssembler::MovUnsignedIntToDouble(DoubleRegister dst, Register src,
                                            Register scratch) {
  // Zero-extend src to 64-bit.
#if V8_TARGET_ARCH_PPC64
  if (CpuFeatures::IsSupported(FPR_GPR_MOV)) {
    mtfprwz(dst, src);
    return;
  }
#endif

  DCHECK(src != scratch);
  subi(sp, sp, Operand(kDoubleSize));
#if V8_TARGET_ARCH_PPC64
  clrldi(scratch, src, Operand(32));
  std(scratch, MemOperand(sp, 0));
#else
  li(scratch, Operand::Zero());
  stw(scratch, MemOperand(sp, Register::kExponentOffset));
  stw(src, MemOperand(sp, Register::kMantissaOffset));
#endif
  nop(GROUP_ENDING_NOP);  // LHS/RAW optimization
  lfd(dst, MemOperand(sp, 0));
  addi(sp, sp, Operand(kDoubleSize));
}
void TurboAssembler::MovInt64ToDouble(DoubleRegister dst,
#if !V8_TARGET_ARCH_PPC64
                                      Register src_hi,
#endif
                                      Register src) {
#if V8_TARGET_ARCH_PPC64
  if (CpuFeatures::IsSupported(FPR_GPR_MOV)) {
    mtfprd(dst, src);
    return;
  }
#endif

  subi(sp, sp, Operand(kDoubleSize));
#if V8_TARGET_ARCH_PPC64
  std(src, MemOperand(sp, 0));
#else
  stw(src_hi, MemOperand(sp, Register::kExponentOffset));
  stw(src, MemOperand(sp, Register::kMantissaOffset));
#endif
  nop(GROUP_ENDING_NOP);  // LHS/RAW optimization
  lfd(dst, MemOperand(sp, 0));
  addi(sp, sp, Operand(kDoubleSize));
}

#if V8_TARGET_ARCH_PPC64
void TurboAssembler::MovInt64ComponentsToDouble(DoubleRegister dst,
                                                Register src_hi,
                                                Register src_lo,
                                                Register scratch) {
  if (CpuFeatures::IsSupported(FPR_GPR_MOV)) {
    sldi(scratch, src_hi, Operand(32));
    rldimi(scratch, src_lo, 0, 32);
    mtfprd(dst, scratch);
    return;
  }

  subi(sp, sp, Operand(kDoubleSize));
  stw(src_hi, MemOperand(sp, Register::kExponentOffset));
  stw(src_lo, MemOperand(sp, Register::kMantissaOffset));
  nop(GROUP_ENDING_NOP);  // LHS/RAW optimization
  lfd(dst, MemOperand(sp));
  addi(sp, sp, Operand(kDoubleSize));
}
#endif
void TurboAssembler::InsertDoubleLow(DoubleRegister dst, Register src,
                                     Register scratch) {
#if V8_TARGET_ARCH_PPC64
  if (CpuFeatures::IsSupported(FPR_GPR_MOV)) {
    mffprd(scratch, dst);
    rldimi(scratch, src, 0, 32);
    mtfprd(dst, scratch);
    return;
  }
#endif

  subi(sp, sp, Operand(kDoubleSize));
  stfd(dst, MemOperand(sp));
  stw(src, MemOperand(sp, Register::kMantissaOffset));
  nop(GROUP_ENDING_NOP);  // LHS/RAW optimization
  lfd(dst, MemOperand(sp));
  addi(sp, sp, Operand(kDoubleSize));
}

void TurboAssembler::InsertDoubleHigh(DoubleRegister dst, Register src,
                                      Register scratch) {
#if V8_TARGET_ARCH_PPC64
  if (CpuFeatures::IsSupported(FPR_GPR_MOV)) {
    mffprd(scratch, dst);
    rldimi(scratch, src, 32, 0);
    mtfprd(dst, scratch);
    return;
  }
#endif

  subi(sp, sp, Operand(kDoubleSize));
  stfd(dst, MemOperand(sp));
  stw(src, MemOperand(sp, Register::kExponentOffset));
  nop(GROUP_ENDING_NOP);  // LHS/RAW optimization
  lfd(dst, MemOperand(sp));
  addi(sp, sp, Operand(kDoubleSize));
}
void TurboAssembler::MovDoubleLowToInt(Register dst, DoubleRegister src) {
#if V8_TARGET_ARCH_PPC64
  if (CpuFeatures::IsSupported(FPR_GPR_MOV)) {
    mffprwz(dst, src);
    return;
  }
#endif

  subi(sp, sp, Operand(kDoubleSize));
  stfd(src, MemOperand(sp));
  nop(GROUP_ENDING_NOP);  // LHS/RAW optimization
  lwz(dst, MemOperand(sp, Register::kMantissaOffset));
  addi(sp, sp, Operand(kDoubleSize));
}

void TurboAssembler::MovDoubleHighToInt(Register dst, DoubleRegister src) {
#if V8_TARGET_ARCH_PPC64
  if (CpuFeatures::IsSupported(FPR_GPR_MOV)) {
    mffprd(dst, src);
    srdi(dst, dst, Operand(32));
    return;
  }
#endif

  subi(sp, sp, Operand(kDoubleSize));
  stfd(src, MemOperand(sp));
  nop(GROUP_ENDING_NOP);  // LHS/RAW optimization
  lwz(dst, MemOperand(sp, Register::kExponentOffset));
  addi(sp, sp, Operand(kDoubleSize));
}
void TurboAssembler::MovDoubleToInt64(
#if !V8_TARGET_ARCH_PPC64
    Register dst_hi,
#endif
    Register dst, DoubleRegister src) {
#if V8_TARGET_ARCH_PPC64
  if (CpuFeatures::IsSupported(FPR_GPR_MOV)) {
    mffprd(dst, src);
    return;
  }
#endif

  subi(sp, sp, Operand(kDoubleSize));
  stfd(src, MemOperand(sp));
  nop(GROUP_ENDING_NOP);  // LHS/RAW optimization
#if V8_TARGET_ARCH_PPC64
  ld(dst, MemOperand(sp, 0));
#else
  lwz(dst_hi, MemOperand(sp, Register::kExponentOffset));
  lwz(dst, MemOperand(sp, Register::kMantissaOffset));
#endif
  addi(sp, sp, Operand(kDoubleSize));
}
void TurboAssembler::MovIntToFloat(DoubleRegister dst, Register src) {
  subi(sp, sp, Operand(kFloatSize));
  stw(src, MemOperand(sp, 0));
  nop(GROUP_ENDING_NOP);  // LHS/RAW optimization
  lfs(dst, MemOperand(sp, 0));
  addi(sp, sp, Operand(kFloatSize));
}

void TurboAssembler::MovFloatToInt(Register dst, DoubleRegister src) {
  subi(sp, sp, Operand(kFloatSize));
  stfs(src, MemOperand(sp, 0));
  nop(GROUP_ENDING_NOP);  // LHS/RAW optimization
  lwz(dst, MemOperand(sp, 0));
  addi(sp, sp, Operand(kFloatSize));
}
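// Immediate-operand helpers: each picks the single-instruction D-form when
// the constant fits the 16-bit immediate field and otherwise materializes the
// value in a scratch register first.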
void TurboAssembler::Add(Register dst, Register src, intptr_t value,
                         Register scratch) {
  if (is_int16(value)) {
    addi(dst, src, Operand(value));
  } else {
    mov(scratch, Operand(value));
    add(dst, src, scratch);
  }
}

void TurboAssembler::Cmpi(Register src1, const Operand& src2, Register scratch,
                          CRegister cr) {
  intptr_t value = src2.immediate();
  if (is_int16(value)) {
    cmpi(src1, src2, cr);
  } else {
    mov(scratch, src2);
    cmp(src1, scratch, cr);
  }
}

void TurboAssembler::Cmpli(Register src1, const Operand& src2, Register scratch,
                           CRegister cr) {
  intptr_t value = src2.immediate();
  if (is_uint16(value)) {
    cmpli(src1, src2, cr);
  } else {
    mov(scratch, src2);
    cmpl(src1, scratch, cr);
  }
}

void TurboAssembler::Cmpwi(Register src1, const Operand& src2, Register scratch,
                           CRegister cr) {
  intptr_t value = src2.immediate();
  if (is_int16(value)) {
    cmpwi(src1, src2, cr);
  } else {
    mov(scratch, src2);
    cmpw(src1, scratch, cr);
  }
}
void MacroAssembler::Cmplwi(Register src1, const Operand& src2,
                            Register scratch, CRegister cr) {
  intptr_t value = src2.immediate();
  if (is_uint16(value)) {
    cmplwi(src1, src2, cr);
  } else {
    mov(scratch, src2);
    cmplw(src1, scratch, cr);
  }
}

void MacroAssembler::And(Register ra, Register rs, const Operand& rb,
                         RCBit rc) {
  if (rb.is_reg()) {
    and_(ra, rs, rb.rm(), rc);
  } else {
    if (is_uint16(rb.immediate()) && RelocInfo::IsNone(rb.rmode_) &&
        rc == SetRC) {
      andi(ra, rs, rb);
    } else {
      // mov handles the relocation.
      DCHECK(rs != r0);
      mov(r0, rb);
      and_(ra, rs, r0, rc);
    }
  }
}

void MacroAssembler::Or(Register ra, Register rs, const Operand& rb, RCBit rc) {
  if (rb.is_reg()) {
    orx(ra, rs, rb.rm(), rc);
  } else {
    if (is_uint16(rb.immediate()) && RelocInfo::IsNone(rb.rmode_) &&
        rc == LeaveRC) {
      ori(ra, rs, rb);
    } else {
      // mov handles the relocation.
      DCHECK(rs != r0);
      mov(r0, rb);
      orx(ra, rs, r0, rc);
    }
  }
}

void MacroAssembler::Xor(Register ra, Register rs, const Operand& rb,
                         RCBit rc) {
  if (rb.is_reg()) {
    xor_(ra, rs, rb.rm(), rc);
  } else {
    if (is_uint16(rb.immediate()) && RelocInfo::IsNone(rb.rmode_) &&
        rc == LeaveRC) {
      xori(ra, rs, rb);
    } else {
      // mov handles the relocation.
      DCHECK(rs != r0);
      mov(r0, rb);
      xor_(ra, rs, r0, rc);
    }
  }
}
void MacroAssembler::CmpSmiLiteral(Register src1, Smi smi, Register scratch,
                                   CRegister cr) {
#if V8_TARGET_ARCH_PPC64
  LoadSmiLiteral(scratch, smi);
  cmp(src1, scratch, cr);
#else
  Cmpi(src1, Operand(smi), scratch, cr);
#endif
}

void MacroAssembler::CmplSmiLiteral(Register src1, Smi smi, Register scratch,
                                    CRegister cr) {
#if V8_TARGET_ARCH_PPC64
  LoadSmiLiteral(scratch, smi);
  cmpl(src1, scratch, cr);
#else
  Cmpli(src1, Operand(smi), scratch, cr);
#endif
}

void MacroAssembler::AddSmiLiteral(Register dst, Register src, Smi smi,
                                   Register scratch) {
#if V8_TARGET_ARCH_PPC64
  LoadSmiLiteral(scratch, smi);
  add(dst, src, scratch);
#else
  Add(dst, src, reinterpret_cast<intptr_t>(smi), scratch);
#endif
}

void MacroAssembler::SubSmiLiteral(Register dst, Register src, Smi smi,
                                   Register scratch) {
#if V8_TARGET_ARCH_PPC64
  LoadSmiLiteral(scratch, smi);
  sub(dst, src, scratch);
#else
  Add(dst, src, -(reinterpret_cast<intptr_t>(smi)), scratch);
#endif
}

void MacroAssembler::AndSmiLiteral(Register dst, Register src, Smi smi,
                                   Register scratch, RCBit rc) {
#if V8_TARGET_ARCH_PPC64
  LoadSmiLiteral(scratch, smi);
  and_(dst, src, scratch, rc);
#else
  And(dst, src, Operand(smi), rc);
#endif
}
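// LoadP/StoreP move pointer-sized values. PPC D-form loads take a signed
// 16-bit displacement, so larger offsets are moved into a scratch register
// and the indexed (X-form) variant is used instead. On PPC64 the DS-form
// ld/std additionally require a displacement that is a multiple of 4;
// misaligned offsets are handled by folding the low bits into the base
// register first.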
void TurboAssembler::LoadP(Register dst, const MemOperand& mem,
                           Register scratch) {
  DCHECK_EQ(mem.rb(), no_reg);
  int offset = mem.offset();

  if (!is_int16(offset)) {
    // Offset does not fit in the d-form displacement field.
    DCHECK_NE(scratch, no_reg);
    mov(scratch, Operand(offset));
    LoadPX(dst, MemOperand(mem.ra(), scratch));
  } else {
#if V8_TARGET_ARCH_PPC64
    int misaligned = (offset & 3);
    if (misaligned) {
      // Adjust the base register so the remaining displacement is a
      // multiple of 4, as required by the DS-form ld.
      DCHECK(dst != r0);
      addi(dst, mem.ra(), Operand((offset & 3) - 4));
      ld(dst, MemOperand(dst, (offset & ~3) + 4));
    } else {
      ld(dst, mem);
    }
#else
    lwz(dst, mem);
#endif
  }
}
void TurboAssembler::LoadPU(Register dst, const MemOperand& mem,
                            Register scratch) {
  int offset = mem.offset();

  if (!is_int16(offset)) {
    // Offset does not fit in the d-form displacement field.
    DCHECK(scratch != no_reg);
    mov(scratch, Operand(offset));
    LoadPUX(dst, MemOperand(mem.ra(), scratch));
  } else {
#if V8_TARGET_ARCH_PPC64
    ldu(dst, mem);
#else
    lwzu(dst, mem);
#endif
  }
}

void TurboAssembler::StoreP(Register src, const MemOperand& mem,
                            Register scratch) {
  int offset = mem.offset();

  if (!is_int16(offset)) {
    // Offset does not fit in the d-form displacement field.
    DCHECK(scratch != no_reg);
    mov(scratch, Operand(offset));
    StorePX(src, MemOperand(mem.ra(), scratch));
  } else {
#if V8_TARGET_ARCH_PPC64
    int misaligned = (offset & 3);
    if (misaligned) {
      // A scratch register is needed to satisfy the DS-form std alignment;
      // r0 cannot serve as a base register, so fall back to stdx.
      DCHECK(scratch != no_reg);
      if (scratch == r0) {
        LoadIntLiteral(scratch, offset);
        stdx(src, MemOperand(mem.ra(), scratch));
      } else {
        addi(scratch, mem.ra(), Operand((offset & 3) - 4));
        std(src, MemOperand(scratch, (offset & ~3) + 4));
      }
    } else {
      std(src, mem);
    }
#else
    stw(src, mem);
#endif
  }
}
void TurboAssembler::StorePU(Register src, const MemOperand& mem,
                             Register scratch) {
  int offset = mem.offset();

  if (!is_int16(offset)) {
    // Offset does not fit in the d-form displacement field.
    DCHECK(scratch != no_reg);
    mov(scratch, Operand(offset));
    StorePUX(src, MemOperand(mem.ra(), scratch));
  } else {
#if V8_TARGET_ARCH_PPC64
    stdu(src, mem);
#else
    stwu(src, mem);
#endif
  }
}

void TurboAssembler::LoadWordArith(Register dst, const MemOperand& mem,
                                   Register scratch) {
  int offset = mem.offset();

  if (!is_int16(offset)) {
    DCHECK(scratch != no_reg);
    mov(scratch, Operand(offset));
    lwax(dst, MemOperand(mem.ra(), scratch));
  } else {
#if V8_TARGET_ARCH_PPC64
    int misaligned = (offset & 3);
    if (misaligned) {
      // Adjust the base register so the remaining displacement is a
      // multiple of 4, as required by the DS-form lwa.
      DCHECK(dst != r0);
      addi(dst, mem.ra(), Operand((offset & 3) - 4));
      lwa(dst, MemOperand(dst, (offset & ~3) + 4));
    } else {
      lwa(dst, mem);
    }
#else
    lwz(dst, mem);
#endif
  }
}
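// The Word/HalfWord/Byte accessors below all follow the same shape: use
// the d-form instruction when the offset fits in 16 signed bits, otherwise
// load the offset into the scratch register and use the indexed form.
// Variants ending in "Arith" sign-extend the loaded value (lwa/lha)
// instead of zero-extending it (lwz/lhz).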
void MacroAssembler::LoadWord(Register dst, const MemOperand& mem,
                              Register scratch) {
  Register base = mem.ra();
  int offset = mem.offset();

  if (!is_int16(offset)) {
    LoadIntLiteral(scratch, offset);
    lwzx(dst, MemOperand(base, scratch));
  } else {
    lwz(dst, mem);
  }
}
void MacroAssembler::StoreWord(Register src, const MemOperand& mem,
                               Register scratch) {
  Register base = mem.ra();
  int offset = mem.offset();

  if (!is_int16(offset)) {
    LoadIntLiteral(scratch, offset);
    stwx(src, MemOperand(base, scratch));
  } else {
    stw(src, mem);
  }
}
void MacroAssembler::LoadHalfWordArith(Register dst, const MemOperand& mem,
                                       Register scratch) {
  int offset = mem.offset();

  if (!is_int16(offset)) {
    DCHECK(scratch != no_reg);
    mov(scratch, Operand(offset));
    lhax(dst, MemOperand(mem.ra(), scratch));
  } else {
    lha(dst, mem);
  }
}
void MacroAssembler::LoadHalfWord(Register dst, const MemOperand& mem,
                                  Register scratch) {
  Register base = mem.ra();
  int offset = mem.offset();

  if (!is_int16(offset)) {
    DCHECK_NE(scratch, no_reg);
    LoadIntLiteral(scratch, offset);
    lhzx(dst, MemOperand(base, scratch));
  } else {
    lhz(dst, mem);
  }
}
void MacroAssembler::StoreHalfWord(Register src, const MemOperand& mem,
                                   Register scratch) {
  Register base = mem.ra();
  int offset = mem.offset();

  if (!is_int16(offset)) {
    LoadIntLiteral(scratch, offset);
    sthx(src, MemOperand(base, scratch));
  } else {
    sth(src, mem);
  }
}
void MacroAssembler::LoadByte(Register dst, const MemOperand& mem,
                              Register scratch) {
  Register base = mem.ra();
  int offset = mem.offset();

  if (!is_int16(offset)) {
    LoadIntLiteral(scratch, offset);
    lbzx(dst, MemOperand(base, scratch));
  } else {
    lbz(dst, mem);
  }
}
void MacroAssembler::StoreByte(Register src, const MemOperand& mem,
                               Register scratch) {
  Register base = mem.ra();
  int offset = mem.offset();

  if (!is_int16(offset)) {
    LoadIntLiteral(scratch, offset);
    stbx(src, MemOperand(base, scratch));
  } else {
    stb(src, mem);
  }
}
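// LoadRepresentation/StoreRepresentation dispatch on the value's
// Representation so callers do not have to pick the width- and
// signedness-correct accessor themselves; anything that is not one of the
// narrow integer cases falls through to a full pointer-sized access.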
void MacroAssembler::LoadRepresentation(Register dst, const MemOperand& mem,
                                        Representation r, Register scratch) {
  DCHECK(!r.IsDouble());
  if (r.IsInteger8()) {
    LoadByte(dst, mem, scratch);
    extsb(dst, dst);  // sign-extend the zero-extended byte load
  } else if (r.IsUInteger8()) {
    LoadByte(dst, mem, scratch);
  } else if (r.IsInteger16()) {
    LoadHalfWordArith(dst, mem, scratch);
  } else if (r.IsUInteger16()) {
    LoadHalfWord(dst, mem, scratch);
#if V8_TARGET_ARCH_PPC64
  } else if (r.IsInteger32()) {
    LoadWordArith(dst, mem, scratch);
#endif
  } else {
    LoadP(dst, mem, scratch);
  }
}
void MacroAssembler::StoreRepresentation(Register src, const MemOperand& mem,
                                         Representation r, Register scratch) {
  DCHECK(!r.IsDouble());
  if (r.IsInteger8() || r.IsUInteger8()) {
    StoreByte(src, mem, scratch);
  } else if (r.IsInteger16() || r.IsUInteger16()) {
    StoreHalfWord(src, mem, scratch);
#if V8_TARGET_ARCH_PPC64
  } else if (r.IsInteger32()) {
    StoreWord(src, mem, scratch);
#endif
  } else {
    if (r.IsHeapObject()) {
      AssertNotSmi(src);
    } else if (r.IsSmi()) {
      AssertSmi(src);
    }
    StoreP(src, mem, scratch);
  }
}
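// The floating-point accessors below mirror the integer ones: d-form
// lfd/lfs/stfd/stfs when the offset fits in 16 signed bits, indexed forms
// via the scratch register otherwise. The "U" variants use the update-form
// instructions, which write the effective address back into the base
// register.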
void TurboAssembler::LoadDouble(DoubleRegister dst, const MemOperand& mem,
                                Register scratch) {
  Register base = mem.ra();
  int offset = mem.offset();

  if (!is_int16(offset)) {
    mov(scratch, Operand(offset));
    lfdx(dst, MemOperand(base, scratch));
  } else {
    lfd(dst, mem);
  }
}
void MacroAssembler::LoadDoubleU(DoubleRegister dst, const MemOperand& mem,
                                 Register scratch) {
  Register base = mem.ra();
  int offset = mem.offset();

  if (!is_int16(offset)) {
    mov(scratch, Operand(offset));
    lfdux(dst, MemOperand(base, scratch));
  } else {
    lfdu(dst, mem);
  }
}
void TurboAssembler::LoadSingle(DoubleRegister dst, const MemOperand& mem,
                                Register scratch) {
  Register base = mem.ra();
  int offset = mem.offset();

  if (!is_int16(offset)) {
    mov(scratch, Operand(offset));
    lfsx(dst, MemOperand(base, scratch));
  } else {
    lfs(dst, mem);
  }
}
void TurboAssembler::LoadSingleU(DoubleRegister dst, const MemOperand& mem,
                                 Register scratch) {
  Register base = mem.ra();
  int offset = mem.offset();

  if (!is_int16(offset)) {
    mov(scratch, Operand(offset));
    lfsux(dst, MemOperand(base, scratch));
  } else {
    lfsu(dst, mem);
  }
}
void TurboAssembler::StoreDouble(DoubleRegister src, const MemOperand& mem,
                                 Register scratch) {
  Register base = mem.ra();
  int offset = mem.offset();

  if (!is_int16(offset)) {
    mov(scratch, Operand(offset));
    stfdx(src, MemOperand(base, scratch));
  } else {
    stfd(src, mem);
  }
}
void TurboAssembler::StoreDoubleU(DoubleRegister src, const MemOperand& mem,
                                  Register scratch) {
  Register base = mem.ra();
  int offset = mem.offset();

  if (!is_int16(offset)) {
    mov(scratch, Operand(offset));
    stfdux(src, MemOperand(base, scratch));
  } else {
    stfdu(src, mem);
  }
}
void TurboAssembler::StoreSingle(DoubleRegister src, const MemOperand& mem,
                                 Register scratch) {
  Register base = mem.ra();
  int offset = mem.offset();

  if (!is_int16(offset)) {
    mov(scratch, Operand(offset));
    stfsx(src, MemOperand(base, scratch));
  } else {
    stfs(src, mem);
  }
}
void TurboAssembler::StoreSingleU(DoubleRegister src, const MemOperand& mem,
                                  Register scratch) {
  Register base = mem.ra();
  int offset = mem.offset();

  if (!is_int16(offset)) {
    mov(scratch, Operand(offset));
    stfsux(src, MemOperand(base, scratch));
  } else {
    stfsu(src, mem);
  }
}
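// GetRegisterThatIsNotOneOf scans the allocatable general registers and
// returns the first one that is not among the (up to six) excluded
// registers; it is a helper for code that needs an ad-hoc temporary.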
Register GetRegisterThatIsNotOneOf(Register reg1, Register reg2, Register reg3,
                                   Register reg4, Register reg5,
                                   Register reg6) {
  RegList regs = 0;
  if (reg1.is_valid()) regs |= reg1.bit();
  if (reg2.is_valid()) regs |= reg2.bit();
  if (reg3.is_valid()) regs |= reg3.bit();
  if (reg4.is_valid()) regs |= reg4.bit();
  if (reg5.is_valid()) regs |= reg5.bit();
  if (reg6.is_valid()) regs |= reg6.bit();

  const RegisterConfiguration* config = RegisterConfiguration::Default();
  for (int i = 0; i < config->num_allocatable_general_registers(); ++i) {
    int code = config->GetAllocatableGeneralCode(i);
    Register candidate = Register::from_code(code);
    if (regs & candidate.bit()) continue;
    return candidate;
  }
  UNREACHABLE();
}
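// The Swap* overloads exchange two values held in registers and/or memory
// using the supplied scratch register(s); r0 is passed to the underlying
// Load/Store helpers as the offset scratch. The register-register forms
// are three-move swaps; the memory-memory forms load both operands before
// storing them back crosswise.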
void TurboAssembler::SwapP(Register src, Register dst, Register scratch) {
  if (src == dst) return;
  DCHECK(!AreAliased(src, dst, scratch));
  mr(scratch, src);
  mr(src, dst);
  mr(dst, scratch);
}
void TurboAssembler::SwapP(Register src, MemOperand dst, Register scratch) {
  if (dst.ra() != r0 && dst.ra().is_valid())
    DCHECK(!AreAliased(src, dst.ra(), scratch));
  if (dst.rb() != r0 && dst.rb().is_valid())
    DCHECK(!AreAliased(src, dst.rb(), scratch));
  DCHECK(!AreAliased(src, scratch));
  mr(scratch, src);
  LoadP(src, dst, r0);
  StoreP(scratch, dst, r0);
}
void TurboAssembler::SwapP(MemOperand src, MemOperand dst, Register scratch_0,
                           Register scratch_1) {
  if (src.ra() != r0 && src.ra().is_valid())
    DCHECK(!AreAliased(src.ra(), scratch_0, scratch_1));
  if (src.rb() != r0 && src.rb().is_valid())
    DCHECK(!AreAliased(src.rb(), scratch_0, scratch_1));
  if (dst.ra() != r0 && dst.ra().is_valid())
    DCHECK(!AreAliased(dst.ra(), scratch_0, scratch_1));
  if (dst.rb() != r0 && dst.rb().is_valid())
    DCHECK(!AreAliased(dst.rb(), scratch_0, scratch_1));
  DCHECK(!AreAliased(scratch_0, scratch_1));
  if (is_int16(src.offset()) || is_int16(dst.offset())) {
    if (!is_int16(src.offset())) {
      // If only one offset needs a scratch register, make it the dst side.
      MemOperand temp = src;
      src = dst;
      dst = temp;
    }
    LoadP(scratch_1, dst, scratch_0);
    LoadP(scratch_0, src);
    StoreP(scratch_1, src);
    StoreP(scratch_0, dst, scratch_1);
  } else {
    // Both offsets are wide: spill one value to the stack across the
    // second load so a single pair of scratch registers suffices.
    LoadP(scratch_1, dst, scratch_0);
    push(scratch_1);
    LoadP(scratch_0, src, scratch_1);
    StoreP(scratch_0, dst, scratch_1);
    pop(scratch_1);
    StoreP(scratch_1, src, scratch_0);
  }
}
void TurboAssembler::SwapFloat32(DoubleRegister src, DoubleRegister dst,
                                 DoubleRegister scratch) {
  if (src == dst) return;
  DCHECK(!AreAliased(src, dst, scratch));
  fmr(scratch, src);
  fmr(src, dst);
  fmr(dst, scratch);
}
void TurboAssembler::SwapFloat32(DoubleRegister src, MemOperand dst,
                                 DoubleRegister scratch) {
  DCHECK(!AreAliased(src, scratch));
  fmr(scratch, src);
  LoadSingle(src, dst, r0);
  StoreSingle(scratch, dst, r0);
}
void TurboAssembler::SwapFloat32(MemOperand src, MemOperand dst,
                                 DoubleRegister scratch_0,
                                 DoubleRegister scratch_1) {
  DCHECK(!AreAliased(scratch_0, scratch_1));
  LoadSingle(scratch_0, src, r0);
  LoadSingle(scratch_1, dst, r0);
  StoreSingle(scratch_0, dst, r0);
  StoreSingle(scratch_1, src, r0);
}
void TurboAssembler::SwapDouble(DoubleRegister src, DoubleRegister dst,
                                DoubleRegister scratch) {
  if (src == dst) return;
  DCHECK(!AreAliased(src, dst, scratch));
  fmr(scratch, src);
  fmr(src, dst);
  fmr(dst, scratch);
}
void TurboAssembler::SwapDouble(DoubleRegister src, MemOperand dst,
                                DoubleRegister scratch) {
  DCHECK(!AreAliased(src, scratch));
  fmr(scratch, src);
  LoadDouble(src, dst, r0);
  StoreDouble(scratch, dst, r0);
}
void TurboAssembler::SwapDouble(MemOperand src, MemOperand dst,
                                DoubleRegister scratch_0,
                                DoubleRegister scratch_1) {
  DCHECK(!AreAliased(scratch_0, scratch_1));
  LoadDouble(scratch_0, src, r0);
  LoadDouble(scratch_1, dst, r0);
  StoreDouble(scratch_0, dst, r0);
  StoreDouble(scratch_1, src, r0);
}
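// Setting the speculation poison register to -1 (all ones) makes the
// poison mask a no-op, since AND-ing a value with ~0 leaves it unchanged;
// resetting it this way disables masking until the mask is recomputed.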
void TurboAssembler::ResetSpeculationPoisonRegister() {
  mov(kSpeculationPoisonRegister, Operand(-1));
}
void TurboAssembler::JumpIfEqual(Register x, int32_t y, Label* dest) {
  Cmpi(x, Operand(y), r0);
  beq(dest);
}
void TurboAssembler::JumpIfLessThan(Register x, int32_t y, Label* dest) {
  Cmpi(x, Operand(y), r0);
  blt(dest);
}

}  // namespace internal
}  // namespace v8

#endif  // V8_TARGET_ARCH_PPC