// Copyright 2017 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef V8_WASM_BASELINE_LIFTOFF_REGISTER_H_
#define V8_WASM_BASELINE_LIFTOFF_REGISTER_H_

#include <iosfwd>
#include <memory>
#include <type_traits>

#include "src/base/bits.h"
#include "src/wasm/baseline/liftoff-assembler-defs.h"
#include "src/wasm/wasm-opcodes.h"

namespace v8 {
namespace internal {
namespace wasm {

static constexpr bool kNeedI64RegPair = kSystemPointerSize == 4;

enum RegClass : uint8_t {
  kGpReg,
  kFpReg,
  // {kGpRegPair} equals {kNoReg} if {kNeedI64RegPair} is false.
  kGpRegPair,
  kNoReg = kGpRegPair + kNeedI64RegPair
};

enum RegPairHalf : uint8_t { kLowWord = 0, kHighWord = 1 };

static inline constexpr bool needs_reg_pair(ValueType type) {
  return kNeedI64RegPair && type == kWasmI64;
}

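// Map a wasm value type to the register class used to hold it. For example,
// reg_class_for(kWasmI64) yields kGpRegPair on 32-bit platforms (where an i64
// occupies two gp registers) and kGpReg on 64-bit platforms.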
// TODO(clemensh): Use a switch once we require C++14 support.
static inline constexpr RegClass reg_class_for(ValueType type) {
  return needs_reg_pair(type)  // i64 on 32 bit
             ? kGpRegPair
             : type == kWasmI32 || type == kWasmI64  // int types
                   ? kGpReg
                   : type == kWasmF32 || type == kWasmF64  // float types
                         ? kFpReg
                         : kNoReg;  // other (unsupported) types
}

// Maximum code of a gp cache register.
static constexpr int kMaxGpRegCode =
    8 * sizeof(kLiftoffAssemblerGpCacheRegs) -
    base::bits::CountLeadingZeros(kLiftoffAssemblerGpCacheRegs) - 1;
// Maximum code of an fp cache register.
static constexpr int kMaxFpRegCode =
    8 * sizeof(kLiftoffAssemblerFpCacheRegs) -
    base::bits::CountLeadingZeros(kLiftoffAssemblerFpCacheRegs) - 1;
// LiftoffRegister encodes both gp and fp in a unified index space.
// [0 .. kMaxGpRegCode] encodes gp registers,
// [kMaxGpRegCode+1 .. kMaxGpRegCode+1 + kMaxFpRegCode] encodes fp registers.
// I64 values on 32-bit platforms are stored in two registers, both encoded in
// the same LiftoffRegister value.
static constexpr int kAfterMaxLiftoffGpRegCode = kMaxGpRegCode + 1;
static constexpr int kAfterMaxLiftoffFpRegCode =
    kAfterMaxLiftoffGpRegCode + kMaxFpRegCode + 1;
static constexpr int kAfterMaxLiftoffRegCode = kAfterMaxLiftoffFpRegCode;
static constexpr int kBitsPerLiftoffRegCode =
    32 - base::bits::CountLeadingZeros<uint32_t>(kAfterMaxLiftoffRegCode - 1);
static constexpr int kBitsPerGpRegCode =
    32 - base::bits::CountLeadingZeros<uint32_t>(kMaxGpRegCode);
static constexpr int kBitsPerGpRegPair = 1 + 2 * kBitsPerGpRegCode;
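// Illustration (the concrete numbers depend on the architecture's cache
// register sets, so treat them as an assumed example): if the highest gp
// cache register code is 7, then kAfterMaxLiftoffGpRegCode is 8 and an fp
// register with code 3 gets Liftoff code 8 + 3 = 11. A register pair
// <low, high> is encoded as low.code() | high.code() << kBitsPerGpRegCode,
// with a marker bit at position 2 * kBitsPerGpRegCode distinguishing pairs
// from single registers (see {ForPair} and {is_pair} below).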

class LiftoffRegister {
  static constexpr int needed_bits =
      Max(kNeedI64RegPair ? kBitsPerGpRegPair : 0, kBitsPerLiftoffRegCode);
  using storage_t = std::conditional<
      needed_bits <= 8, uint8_t,
      std::conditional<needed_bits <= 16, uint16_t, uint32_t>::type>::type;

  static_assert(8 * sizeof(storage_t) >= needed_bits,
                "chosen type is big enough");
  // Check for smallest required data type being chosen.
  // Special case for uint8_t as there are no smaller types.
  static_assert((8 * sizeof(storage_t) < 2 * needed_bits) ||
                    (sizeof(storage_t) == sizeof(uint8_t)),
                "chosen type is small enough");

 public:
  explicit LiftoffRegister(Register reg) : LiftoffRegister(reg.code()) {
    DCHECK_NE(0, kLiftoffAssemblerGpCacheRegs & reg.bit());
    DCHECK_EQ(reg, gp());
  }
  explicit LiftoffRegister(DoubleRegister reg)
      : LiftoffRegister(kAfterMaxLiftoffGpRegCode + reg.code()) {
    DCHECK_NE(0, kLiftoffAssemblerFpCacheRegs & reg.bit());
    DCHECK_EQ(reg, fp());
  }

  static LiftoffRegister from_liftoff_code(uint32_t code) {
    DCHECK_LE(0, code);
    DCHECK_GT(kAfterMaxLiftoffRegCode, code);
    DCHECK_EQ(code, static_cast<storage_t>(code));
    return LiftoffRegister(code);
  }

  static LiftoffRegister from_code(RegClass rc, int code) {
    switch (rc) {
      case kGpReg:
        return LiftoffRegister(Register::from_code(code));
      case kFpReg:
        return LiftoffRegister(DoubleRegister::from_code(code));
      default:
        UNREACHABLE();
    }
  }

  static LiftoffRegister ForPair(Register low, Register high) {
    DCHECK(kNeedI64RegPair);
    DCHECK_NE(low, high);
    storage_t combined_code = low.code() | high.code() << kBitsPerGpRegCode |
                              1 << (2 * kBitsPerGpRegCode);
    return LiftoffRegister(combined_code);
  }

  constexpr bool is_pair() const {
    return kNeedI64RegPair && (code_ & (1 << (2 * kBitsPerGpRegCode))) != 0;
  }
  constexpr bool is_gp() const { return code_ < kAfterMaxLiftoffGpRegCode; }
  constexpr bool is_fp() const {
    return code_ >= kAfterMaxLiftoffGpRegCode &&
           code_ < kAfterMaxLiftoffFpRegCode;
  }

  LiftoffRegister low() const { return LiftoffRegister(low_gp()); }

  LiftoffRegister high() const { return LiftoffRegister(high_gp()); }

  Register low_gp() const {
    DCHECK(is_pair());
    static constexpr storage_t kCodeMask = (1 << kBitsPerGpRegCode) - 1;
    return Register::from_code(code_ & kCodeMask);
  }

  Register high_gp() const {
    DCHECK(is_pair());
    static constexpr storage_t kCodeMask = (1 << kBitsPerGpRegCode) - 1;
    return Register::from_code((code_ >> kBitsPerGpRegCode) & kCodeMask);
  }

  Register gp() const {
    DCHECK(is_gp());
    return Register::from_code(code_);
  }

  DoubleRegister fp() const {
    DCHECK(is_fp());
    return DoubleRegister::from_code(code_ - kAfterMaxLiftoffGpRegCode);
  }

  int liftoff_code() const {
    DCHECK(is_gp() || is_fp());
    return code_;
  }

  RegClass reg_class() const {
    return is_pair() ? kGpRegPair : is_gp() ? kGpReg : kFpReg;
  }

  bool operator==(const LiftoffRegister other) const {
    DCHECK_EQ(is_pair(), other.is_pair());
    return code_ == other.code_;
  }
  bool operator!=(const LiftoffRegister other) const {
    DCHECK_EQ(is_pair(), other.is_pair());
    return code_ != other.code_;
  }
  bool overlaps(const LiftoffRegister other) const {
    if (is_pair()) return low().overlaps(other) || high().overlaps(other);
    if (other.is_pair()) return *this == other.low() || *this == other.high();
    return *this == other;
  }

 private:
  storage_t code_;

  explicit constexpr LiftoffRegister(storage_t code) : code_(code) {}
};
ASSERT_TRIVIALLY_COPYABLE(LiftoffRegister);

inline std::ostream& operator<<(std::ostream& os, LiftoffRegister reg) {
  if (reg.is_pair()) {
    return os << "<" << reg.low_gp() << "+" << reg.high_gp() << ">";
  } else if (reg.is_gp()) {
    return os << reg.gp();
  } else {
    return os << reg.fp();
  }
}

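// A set of LiftoffRegisters, stored as a bit mask over the unified Liftoff
// code space: bit i is set iff the register with Liftoff code i is in the
// set, so gp and fp cache registers are tracked in a single integer.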
class LiftoffRegList {
 public:
  class Iterator;

  static constexpr bool use_u16 = kAfterMaxLiftoffRegCode <= 16;
  static constexpr bool use_u32 = !use_u16 && kAfterMaxLiftoffRegCode <= 32;
  using storage_t = std::conditional<
      use_u16, uint16_t,
      std::conditional<use_u32, uint32_t, uint64_t>::type>::type;

  static constexpr storage_t kGpMask = storage_t{kLiftoffAssemblerGpCacheRegs};
  static constexpr storage_t kFpMask = storage_t{kLiftoffAssemblerFpCacheRegs}
                                       << kAfterMaxLiftoffGpRegCode;

  constexpr LiftoffRegList() = default;

  Register set(Register reg) { return set(LiftoffRegister(reg)).gp(); }
  DoubleRegister set(DoubleRegister reg) {
    return set(LiftoffRegister(reg)).fp();
  }

  LiftoffRegister set(LiftoffRegister reg) {
    if (reg.is_pair()) {
      regs_ |= storage_t{1} << reg.low().liftoff_code();
      regs_ |= storage_t{1} << reg.high().liftoff_code();
    } else {
      regs_ |= storage_t{1} << reg.liftoff_code();
    }
    return reg;
  }

  LiftoffRegister clear(LiftoffRegister reg) {
    if (reg.is_pair()) {
      regs_ &= ~(storage_t{1} << reg.low().liftoff_code());
      regs_ &= ~(storage_t{1} << reg.high().liftoff_code());
    } else {
      regs_ &= ~(storage_t{1} << reg.liftoff_code());
    }
    return reg;
  }

  bool has(LiftoffRegister reg) const {
    if (reg.is_pair()) {
      DCHECK_EQ(has(reg.low()), has(reg.high()));
      reg = reg.low();
    }
    return (regs_ & (storage_t{1} << reg.liftoff_code())) != 0;
  }
  bool has(Register reg) const { return has(LiftoffRegister(reg)); }
  bool has(DoubleRegister reg) const { return has(LiftoffRegister(reg)); }

  constexpr bool is_empty() const { return regs_ == 0; }

  constexpr unsigned GetNumRegsSet() const {
    return base::bits::CountPopulation(regs_);
  }

  constexpr LiftoffRegList operator&(const LiftoffRegList other) const {
    return LiftoffRegList(regs_ & other.regs_);
  }

  constexpr LiftoffRegList operator|(const LiftoffRegList other) const {
    return LiftoffRegList(regs_ | other.regs_);
  }

  constexpr bool operator==(const LiftoffRegList other) const {
    return regs_ == other.regs_;
  }
  constexpr bool operator!=(const LiftoffRegList other) const {
    return regs_ != other.regs_;
  }

  LiftoffRegister GetFirstRegSet() const {
    DCHECK(!is_empty());
    int first_code = base::bits::CountTrailingZeros(regs_);
    return LiftoffRegister::from_liftoff_code(first_code);
  }

  LiftoffRegister GetLastRegSet() const {
    DCHECK(!is_empty());
    int last_code =
        8 * sizeof(regs_) - 1 - base::bits::CountLeadingZeros(regs_);
    return LiftoffRegister::from_liftoff_code(last_code);
  }

  LiftoffRegList MaskOut(const LiftoffRegList mask) const {
    // Masking out is guaranteed to return a correct reg list, hence no checks
    // needed.
    return FromBits(regs_ & ~mask.regs_);
  }

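  // Views of the set as architecture-level register lists; the fp bits are
  // shifted back down from the unified Liftoff code space to plain fp
  // register codes.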
  RegList GetGpList() { return regs_ & kGpMask; }
  RegList GetFpList() { return (regs_ & kFpMask) >> kAfterMaxLiftoffGpRegCode; }

  inline Iterator begin() const;
  inline Iterator end() const;

  static LiftoffRegList FromBits(storage_t bits) {
    DCHECK_EQ(bits, bits & (kGpMask | kFpMask));
    return LiftoffRegList(bits);
  }

  template <storage_t bits>
  static constexpr LiftoffRegList FromBits() {
    static_assert(bits == (bits & (kGpMask | kFpMask)), "illegal reg list");
    return LiftoffRegList(bits);
  }

  template <typename... Regs>
  static LiftoffRegList ForRegs(Regs... regs) {
    LiftoffRegList list;
    for (LiftoffRegister reg : {LiftoffRegister(regs)...}) list.set(reg);
    return list;
  }

 private:
  storage_t regs_ = 0;

  // Unchecked constructor. Only use for valid bits.
  explicit constexpr LiftoffRegList(storage_t bits) : regs_(bits) {}
};
ASSERT_TRIVIALLY_COPYABLE(LiftoffRegList);

static constexpr LiftoffRegList kGpCacheRegList =
    LiftoffRegList::FromBits<LiftoffRegList::kGpMask>();
static constexpr LiftoffRegList kFpCacheRegList =
    LiftoffRegList::FromBits<LiftoffRegList::kFpMask>();

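// Iterates a LiftoffRegList from the lowest to the highest set Liftoff code.
// Together with begin()/end() below, this enables range-based iteration, e.g.
// (illustrative): for (LiftoffRegister reg : candidates) { ... }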
class LiftoffRegList::Iterator {
 public:
  LiftoffRegister operator*() { return remaining_.GetFirstRegSet(); }
  Iterator& operator++() {
    remaining_.clear(remaining_.GetFirstRegSet());
    return *this;
  }
  bool operator==(Iterator other) { return remaining_ == other.remaining_; }
  bool operator!=(Iterator other) { return remaining_ != other.remaining_; }

 private:
  explicit Iterator(LiftoffRegList remaining) : remaining_(remaining) {}
  friend class LiftoffRegList;

  LiftoffRegList remaining_;
};

LiftoffRegList::Iterator LiftoffRegList::begin() const {
  return Iterator{*this};
}
LiftoffRegList::Iterator LiftoffRegList::end() const {
  return Iterator{LiftoffRegList{}};
}

static constexpr LiftoffRegList GetCacheRegList(RegClass rc) {
  return rc == kFpReg ? kFpCacheRegList : kGpCacheRegList;
}

inline std::ostream& operator<<(std::ostream& os, LiftoffRegList reglist) {
  os << "{";
  for (bool first = true; !reglist.is_empty(); first = false) {
    LiftoffRegister reg = reglist.GetFirstRegSet();
    reglist.clear(reg);
    os << (first ? "" : ", ") << reg;
  }
  return os << "}";
}

}  // namespace wasm
}  // namespace internal
}  // namespace v8

#endif  // V8_WASM_BASELINE_LIFTOFF_REGISTER_H_