#ifndef V8_WASM_BASELINE_LIFTOFF_REGISTER_H_
#define V8_WASM_BASELINE_LIFTOFF_REGISTER_H_

#include <array>
#include <ostream>
#include <type_traits>

#include "src/base/bits.h"
#include "src/wasm/baseline/liftoff-assembler-defs.h"
#include "src/wasm/wasm-opcodes.h"

namespace v8 {
namespace internal {
namespace wasm {

static constexpr bool kNeedI64RegPair = kPointerSize == 4;
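// Each value lives in exactly one register class: general purpose (gp),
// floating point (fp), or, if {kNeedI64RegPair} is true, a pair of two gp
// registers holding one i64 value.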
enum RegClass : uint8_t {
  kGpReg,
  kFpReg,
  // {kGpRegPair} equals {kNoReg} if {kNeedI64RegPair} is false.
  kGpRegPair,
  kNoReg = kGpRegPair + kNeedI64RegPair
};

enum RegPairHalf : uint8_t { kLowWord, kHighWord };
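// Returns true if values of {type} are held in a gp register pair, i.e. i64
// values on 32-bit platforms.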
static inline constexpr bool needs_reg_pair(ValueType type) {
  return kNeedI64RegPair && type == kWasmI64;
}
static inline constexpr RegClass reg_class_for(ValueType type) {
  return needs_reg_pair(type)  // i64 on 32-bit platforms
             ? kGpRegPair
             : type == kWasmI32 || type == kWasmI64  // integer types
                   ? kGpReg
                   : type == kWasmF32 || type == kWasmF64  // float types
                         ? kFpReg
                         : kNoReg;  // unsupported types
}
static constexpr int kMaxGpRegCode =
    8 * sizeof(kLiftoffAssemblerGpCacheRegs) -
    base::bits::CountLeadingZeros(kLiftoffAssemblerGpCacheRegs) - 1;
static constexpr int kMaxFpRegCode =
    8 * sizeof(kLiftoffAssemblerFpCacheRegs) -
    base::bits::CountLeadingZeros(kLiftoffAssemblerFpCacheRegs) - 1;
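// The codes of all cache registers are mapped into one contiguous index
// space: gp registers use the codes [0, kMaxGpRegCode], fp registers are
// shifted up by {kAfterMaxLiftoffGpRegCode}.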
static constexpr int kAfterMaxLiftoffGpRegCode = kMaxGpRegCode + 1;
static constexpr int kAfterMaxLiftoffFpRegCode =
    kAfterMaxLiftoffGpRegCode + kMaxFpRegCode + 1;
static constexpr int kAfterMaxLiftoffRegCode = kAfterMaxLiftoffFpRegCode;
static constexpr int kBitsPerLiftoffRegCode =
    32 - base::bits::CountLeadingZeros<uint32_t>(kAfterMaxLiftoffRegCode - 1);
static constexpr int kBitsPerGpRegCode =
    32 - base::bits::CountLeadingZeros<uint32_t>(kMaxGpRegCode);
static constexpr int kBitsPerGpRegPair = 1 + 2 * kBitsPerGpRegCode;
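// A LiftoffRegister wraps either a single gp register, a single fp register,
// or (if {kNeedI64RegPair} is true) a pair of gp registers, all encoded in
// one small integer code.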
class LiftoffRegister {
  static constexpr int needed_bits =
      Max(kNeedI64RegPair ? kBitsPerGpRegPair : 0, kBitsPerLiftoffRegCode);
  using storage_t = std::conditional<
      needed_bits <= 8, uint8_t,
      std::conditional<needed_bits <= 16, uint16_t, uint32_t>::type>::type;
  static_assert(8 * sizeof(storage_t) >= needed_bits,
                "chosen type is big enough");
  static_assert((8 * sizeof(storage_t) < 2 * needed_bits) ||
                    (sizeof(storage_t) == sizeof(uint8_t)),
                "chosen type is small enough");
 public:
  explicit LiftoffRegister(Register reg) : LiftoffRegister(reg.code()) {
    DCHECK_NE(0, kLiftoffAssemblerGpCacheRegs & reg.bit());
  }
  explicit LiftoffRegister(DoubleRegister reg)
      : LiftoffRegister(kAfterMaxLiftoffGpRegCode + reg.code()) {
    DCHECK_NE(0, kLiftoffAssemblerFpCacheRegs & reg.bit());
  }
  static LiftoffRegister from_liftoff_code(int code) {
    DCHECK_GT(kAfterMaxLiftoffRegCode, code);
    DCHECK_EQ(code, static_cast<storage_t>(code));
    return LiftoffRegister(code);
  }
  static LiftoffRegister ForPair(Register low, Register high) {
    DCHECK(kNeedI64RegPair);
    DCHECK_NE(low, high);
    storage_t combined_code = low.code() | high.code() << kBitsPerGpRegCode |
                              1 << (2 * kBitsPerGpRegCode);
    return LiftoffRegister(combined_code);
  }
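  // Pair encoding: the low register code occupies the lowest
  // {kBitsPerGpRegCode} bits, the high register code the next
  // {kBitsPerGpRegCode} bits, and one extra bit above both marks the value as
  // a register pair.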
  constexpr bool is_pair() const {
    return kNeedI64RegPair && (code_ & (1 << (2 * kBitsPerGpRegCode))) != 0;
  }
  constexpr bool is_gp() const { return code_ < kAfterMaxLiftoffGpRegCode; }
  constexpr bool is_fp() const {
    return code_ >= kAfterMaxLiftoffGpRegCode &&
           code_ < kAfterMaxLiftoffFpRegCode;
  }

  LiftoffRegister low() const { return LiftoffRegister(low_gp()); }

  LiftoffRegister high() const { return LiftoffRegister(high_gp()); }
  Register low_gp() const {
    DCHECK(is_pair());
    static constexpr storage_t kCodeMask = (1 << kBitsPerGpRegCode) - 1;
    return Register::from_code(code_ & kCodeMask);
  }
  Register high_gp() const {
    DCHECK(is_pair());
    static constexpr storage_t kCodeMask = (1 << kBitsPerGpRegCode) - 1;
    return Register::from_code((code_ >> kBitsPerGpRegCode) & kCodeMask);
  }
  Register gp() const {
    DCHECK(is_gp());
    return Register::from_code(code_);
  }
  DoubleRegister fp() const {
    DCHECK(is_fp());
    return DoubleRegister::from_code(code_ - kAfterMaxLiftoffGpRegCode);
  }
  int liftoff_code() const {
    DCHECK(is_gp() || is_fp());
    return code_;
  }
  RegClass reg_class() const {
    return is_pair() ? kGpRegPair : is_gp() ? kGpReg : kFpReg;
  }
  bool operator==(const LiftoffRegister other) const {
    DCHECK_EQ(is_pair(), other.is_pair());
    return code_ == other.code_;
  }
  bool operator!=(const LiftoffRegister other) const {
    DCHECK_EQ(is_pair(), other.is_pair());
    return code_ != other.code_;
  }
  bool overlaps(const LiftoffRegister other) const {
    if (is_pair()) return low().overlaps(other) || high().overlaps(other);
    if (other.is_pair()) return *this == other.low() || *this == other.high();
    return *this == other;
  }

 private:
  storage_t code_;

  explicit constexpr LiftoffRegister(storage_t code) : code_(code) {}
};
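// Illustrative usage (a sketch only; assumes an ia32 build where {eax} and
// {xmm1} are among the Liftoff cache registers):
//   LiftoffRegister r(eax);    // gp register, code == eax.code()
//   LiftoffRegister d(xmm1);   // fp register, code is offset past the gp range
//   DCHECK(r.is_gp() && d.is_fp());
//   DCHECK_EQ(kGpReg, r.reg_class());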
inline std::ostream& operator<<(std::ostream& os, LiftoffRegister reg) {
  if (reg.is_pair()) {
    return os << "<" << reg.low_gp() << "+" << reg.high_gp() << ">";
  } else if (reg.is_gp()) {
    return os << reg.gp();
  } else {
    return os << reg.fp();
  }
}
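// A LiftoffRegList is a bit set over the combined Liftoff code space: one bit
// per gp and fp cache register, so both register classes can be tracked in a
// single mask.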
class LiftoffRegList {
 public:
  static constexpr bool use_u16 = kAfterMaxLiftoffRegCode <= 16;
  static constexpr bool use_u32 = !use_u16 && kAfterMaxLiftoffRegCode <= 32;
  using storage_t = std::conditional<
      use_u16, uint16_t,
      std::conditional<use_u32, uint32_t, uint64_t>::type>::type;

  static constexpr storage_t kGpMask = storage_t{kLiftoffAssemblerGpCacheRegs};
  static constexpr storage_t kFpMask = storage_t{kLiftoffAssemblerFpCacheRegs}
                                       << kAfterMaxLiftoffGpRegCode;

  constexpr LiftoffRegList() = default;
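  // {set} and {clear} operate on both halves of a register pair; {has}
  // reports a pair as contained iff both halves are contained.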
  LiftoffRegister set(LiftoffRegister reg) {
    if (reg.is_pair()) {
      regs_ |= storage_t{1} << reg.low().liftoff_code();
      regs_ |= storage_t{1} << reg.high().liftoff_code();
    } else {
      regs_ |= storage_t{1} << reg.liftoff_code();
    }
    return reg;
  }
  LiftoffRegister clear(LiftoffRegister reg) {
    if (reg.is_pair()) {
      regs_ &= ~(storage_t{1} << reg.low().liftoff_code());
      regs_ &= ~(storage_t{1} << reg.high().liftoff_code());
    } else {
      regs_ &= ~(storage_t{1} << reg.liftoff_code());
    }
    return reg;
  }
  bool has(LiftoffRegister reg) const {
    if (reg.is_pair()) {
      DCHECK_EQ(has(reg.low()), has(reg.high()));
      reg = reg.low();
    }
    return (regs_ & (storage_t{1} << reg.liftoff_code())) != 0;
  }
  constexpr bool is_empty() const { return regs_ == 0; }
  constexpr unsigned GetNumRegsSet() const {
    return base::bits::CountPopulation(regs_);
  }
  bool operator==(const LiftoffRegList other) const {
    return regs_ == other.regs_;
  }
  bool operator!=(const LiftoffRegList other) const {
    return regs_ != other.regs_;
  }
  LiftoffRegister GetFirstRegSet() const {
    DCHECK(!is_empty());
    unsigned first_code = base::bits::CountTrailingZeros(regs_);
    return LiftoffRegister::from_liftoff_code(first_code);
  }
  LiftoffRegister GetLastRegSet() const {
    DCHECK(!is_empty());
    unsigned last_code =
        8 * sizeof(regs_) - 1 - base::bits::CountLeadingZeros(regs_);
    return LiftoffRegister::from_liftoff_code(last_code);
  }
  LiftoffRegList MaskOut(const LiftoffRegList mask) const {
    return FromBits(regs_ & ~mask.regs_);
  }
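  // Construct a list from a raw bit mask; only bits belonging to gp or fp
  // cache registers may be set.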
  static LiftoffRegList FromBits(storage_t bits) {
    DCHECK_EQ(bits, bits & (kGpMask | kFpMask));
    return LiftoffRegList(bits);
  }
  template <storage_t bits>
  static constexpr LiftoffRegList FromBits() {
    static_assert(bits == (bits & (kGpMask | kFpMask)), "illegal reg list");
    return LiftoffRegList(bits);
  }
  template <typename... Regs>
  static LiftoffRegList ForRegs(Regs... regs) {
    std::array<LiftoffRegister, sizeof...(regs)> regs_arr{
        {LiftoffRegister(regs)...}};
    LiftoffRegList list;
    for (LiftoffRegister reg : regs_arr) list.set(reg);
    return list;
  }
  RegList GetGpList() { return regs_ & kGpMask; }
  RegList GetFpList() { return (regs_ & kFpMask) >> kAfterMaxLiftoffGpRegCode; }
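  // Unchecked constructor, only used internally for bit masks that are known
  // to be valid.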
 private:
  storage_t regs_ = 0;

  explicit constexpr LiftoffRegList(storage_t bits) : regs_(bits) {}
};
static constexpr LiftoffRegList kGpCacheRegList =
    LiftoffRegList::FromBits<LiftoffRegList::kGpMask>();
static constexpr LiftoffRegList kFpCacheRegList =
    LiftoffRegList::FromBits<LiftoffRegList::kFpMask>();
static constexpr LiftoffRegList GetCacheRegList(RegClass rc) {
  return rc == kGpReg ? kGpCacheRegList : kFpCacheRegList;
}
inline std::ostream& operator<<(std::ostream& os, LiftoffRegList reglist) {
  os << "{";
  for (bool first = true; !reglist.is_empty(); first = false) {
    LiftoffRegister reg = reglist.GetFirstRegSet();
    reglist.clear(reg);
    os << (first ? "" : ", ") << reg;
  }
  return os << "}";
}
}  // namespace wasm
}  // namespace internal
}  // namespace v8

#endif  // V8_WASM_BASELINE_LIFTOFF_REGISTER_H_