1// Copyright 2014 the V8 project authors. All rights reserved.
2// Use of this source code is governed by a BSD-style license that can be
3// found in the LICENSE file.
4
5#ifndef V8_COMPILER_BACKEND_INSTRUCTION_H_
6#define V8_COMPILER_BACKEND_INSTRUCTION_H_
7
8#include <deque>
9#include <iosfwd>
10#include <map>
11#include <set>
12
13#include "src/base/compiler-specific.h"
14#include "src/compiler/backend/instruction-codes.h"
15#include "src/compiler/common-operator.h"
16#include "src/compiler/frame.h"
17#include "src/compiler/opcodes.h"
18#include "src/double.h"
19#include "src/external-reference.h"
20#include "src/globals.h"
21#include "src/register-arch.h"
22#include "src/source-position.h"
23#include "src/zone/zone-allocator.h"
24
25namespace v8 {
26namespace internal {
27
28class RegisterConfiguration;
29
30namespace compiler {
31
32class Schedule;
33class SourcePositionTable;
34
// Base class of all operand kinds. An operand is a plain 64-bit value: the
// low 3 bits hold the Kind tag and each subclass packs its payload into the
// remaining bits via BitField64, so operands are cheap to copy, compare and
// store by value.
class V8_EXPORT_PRIVATE InstructionOperand {
 public:
  static const int kInvalidVirtualRegister = -1;

  enum Kind {
    INVALID,
    UNALLOCATED,
    CONSTANT,
    IMMEDIATE,
    // Location operand kinds.
    EXPLICIT,
    ALLOCATED,
    FIRST_LOCATION_OPERAND_KIND = EXPLICIT
    // Location operand kinds must be last.
  };

  InstructionOperand() : InstructionOperand(INVALID) {}

  Kind kind() const { return KindField::decode(value_); }

// Defines Is<name>() returning true when kind() == type.
#define INSTRUCTION_OPERAND_PREDICATE(name, type) \
  bool Is##name() const { return kind() == type; }
  INSTRUCTION_OPERAND_PREDICATE(Invalid, INVALID)
  // UnallocatedOperands are place-holder operands created before register
  // allocation. They later are assigned registers and become AllocatedOperands.
  INSTRUCTION_OPERAND_PREDICATE(Unallocated, UNALLOCATED)
  // Constant operands participate in register allocation. They are allocated to
  // registers but have a special "spilling" behavior. When a ConstantOperand
  // value must be rematerialized, it is loaded from an immediate constant
  // rather than from a spill slot.
  INSTRUCTION_OPERAND_PREDICATE(Constant, CONSTANT)
  // ImmediateOperands do not participate in register allocation and are only
  // embedded directly in instructions, e.g. small integers and on some
  // platforms Objects.
  INSTRUCTION_OPERAND_PREDICATE(Immediate, IMMEDIATE)
  // ExplicitOperands do not participate in register allocation. They are
  // created by the instruction selector for direct access to registers and
  // stack slots, completely bypassing the register allocator. They are never
  // associated with a virtual register.
  INSTRUCTION_OPERAND_PREDICATE(Explicit, EXPLICIT)
  // AllocatedOperands are registers or stack slots that are assigned by the
  // register allocator and are always associated with a virtual register.
  INSTRUCTION_OPERAND_PREDICATE(Allocated, ALLOCATED)
#undef INSTRUCTION_OPERAND_PREDICATE

  // Finer-grained predicates over location operands; defined inline near the
  // bottom of this header, once LocationOperand is visible.
  inline bool IsAnyLocationOperand() const;
  inline bool IsLocationOperand() const;
  inline bool IsFPLocationOperand() const;
  inline bool IsAnyRegister() const;
  inline bool IsRegister() const;
  inline bool IsFPRegister() const;
  inline bool IsFloatRegister() const;
  inline bool IsDoubleRegister() const;
  inline bool IsSimd128Register() const;
  inline bool IsAnyStackSlot() const;
  inline bool IsStackSlot() const;
  inline bool IsFPStackSlot() const;
  inline bool IsFloatStackSlot() const;
  inline bool IsDoubleStackSlot() const;
  inline bool IsSimd128StackSlot() const;

  // Copies |op| into zone memory via placement new.
  template <typename SubKindOperand>
  static SubKindOperand* New(Zone* zone, const SubKindOperand& op) {
    void* buffer = zone->New(sizeof(op));
    return new (buffer) SubKindOperand(op);
  }

  static void ReplaceWith(InstructionOperand* dest,
                          const InstructionOperand* src) {
    *dest = *src;
  }

  // Exact bit-for-bit equality/ordering of the encoded value.
  bool Equals(const InstructionOperand& that) const {
    return this->value_ == that.value_;
  }

  bool Compare(const InstructionOperand& that) const {
    return this->value_ < that.value_;
  }

  // Equality/ordering modulo operand kind (EXPLICIT vs. ALLOCATED) and,
  // depending on FP aliasing, the FP machine representation; see
  // GetCanonicalizedValue() below.
  bool EqualsCanonicalized(const InstructionOperand& that) const {
    return this->GetCanonicalizedValue() == that.GetCanonicalizedValue();
  }

  bool CompareCanonicalized(const InstructionOperand& that) const {
    return this->GetCanonicalizedValue() < that.GetCanonicalizedValue();
  }

  bool InterferesWith(const InstructionOperand& other) const;

  // APIs to aid debugging. For general-stream APIs, use operator<<.
  void Print() const;

 protected:
  explicit InstructionOperand(Kind kind) : value_(KindField::encode(kind)) {}

  inline uint64_t GetCanonicalizedValue() const;

  // Kind tag occupies the low 3 bits; subclasses start their fields at bit 3.
  class KindField : public BitField64<Kind, 0, 3> {};

  // The complete operand state, kind tag included.
  uint64_t value_;
};
137
138using InstructionOperandVector = ZoneVector<InstructionOperand>;
139
140std::ostream& operator<<(std::ostream&, const InstructionOperand&);
141
// Declares the three standard downcast helpers (mutable pointer, const
// pointer, and by-value copy) for an operand subclass; each one DCHECKs that
// the operand's dynamic kind matches OperandKind before casting.
#define INSTRUCTION_OPERAND_CASTS(OperandType, OperandKind) \
  \
  static OperandType* cast(InstructionOperand* op) { \
    DCHECK_EQ(OperandKind, op->kind()); \
    return static_cast<OperandType*>(op); \
  } \
  \
  static const OperandType* cast(const InstructionOperand* op) { \
    DCHECK_EQ(OperandKind, op->kind()); \
    return static_cast<const OperandType*>(op); \
  } \
  \
  static OperandType cast(const InstructionOperand& op) { \
    DCHECK_EQ(OperandKind, op.kind()); \
    return *static_cast<const OperandType*>(&op); \
  }
158
// Operand whose final location has not yet been decided: it carries a
// virtual register plus a *policy* telling the register allocator which
// locations are acceptable. See the encoding diagram after the public
// interface below.
class UnallocatedOperand final : public InstructionOperand {
 public:
  enum BasicPolicy { FIXED_SLOT, EXTENDED_POLICY };

  enum ExtendedPolicy {
    NONE,
    REGISTER_OR_SLOT,
    REGISTER_OR_SLOT_OR_CONSTANT,
    FIXED_REGISTER,
    FIXED_FP_REGISTER,
    MUST_HAVE_REGISTER,
    MUST_HAVE_SLOT,
    SAME_AS_FIRST_INPUT
  };

  // Lifetime of operand inside the instruction.
  enum Lifetime {
    // USED_AT_START operand is guaranteed to be live only at instruction start.
    // The register allocator is free to assign the same register to some other
    // operand used inside instruction (i.e. temporary or output).
    USED_AT_START,

    // USED_AT_END operand is treated as live until the end of instruction.
    // This means that register allocator will not reuse its register for any
    // other operand inside instruction.
    USED_AT_END
  };

  // Extended policy with no fixed index; lifetime defaults to USED_AT_END.
  UnallocatedOperand(ExtendedPolicy policy, int virtual_register)
      : UnallocatedOperand(virtual_register) {
    value_ |= BasicPolicyField::encode(EXTENDED_POLICY);
    value_ |= ExtendedPolicyField::encode(policy);
    value_ |= LifetimeField::encode(USED_AT_END);
  }

  // FIXED_SLOT policy: pin the operand to stack slot |index|. The (possibly
  // negative) index is stored with a plain shift rather than
  // FixedSlotIndexField::encode so its sign is preserved; decoding uses an
  // arithmetic right-shift in fixed_slot_index().
  UnallocatedOperand(BasicPolicy policy, int index, int virtual_register)
      : UnallocatedOperand(virtual_register) {
    DCHECK(policy == FIXED_SLOT);
    value_ |= BasicPolicyField::encode(policy);
    value_ |= static_cast<uint64_t>(static_cast<int64_t>(index))
              << FixedSlotIndexField::kShift;
    DCHECK(this->fixed_slot_index() == index);
  }

  // FIXED_REGISTER / FIXED_FP_REGISTER policy: pin the operand to the
  // register with code |index|; lifetime defaults to USED_AT_END.
  UnallocatedOperand(ExtendedPolicy policy, int index, int virtual_register)
      : UnallocatedOperand(virtual_register) {
    DCHECK(policy == FIXED_REGISTER || policy == FIXED_FP_REGISTER);
    value_ |= BasicPolicyField::encode(EXTENDED_POLICY);
    value_ |= ExtendedPolicyField::encode(policy);
    value_ |= LifetimeField::encode(USED_AT_END);
    value_ |= FixedRegisterField::encode(index);
  }

  // Extended policy with an explicit lifetime.
  UnallocatedOperand(ExtendedPolicy policy, Lifetime lifetime,
                     int virtual_register)
      : UnallocatedOperand(virtual_register) {
    value_ |= BasicPolicyField::encode(EXTENDED_POLICY);
    value_ |= ExtendedPolicyField::encode(policy);
    value_ |= LifetimeField::encode(lifetime);
  }

  // Fixed register |reg_id| with an additional secondary storage slot
  // |slot_id| (see HasSecondaryStorage()/GetSecondaryStorage()).
  UnallocatedOperand(int reg_id, int slot_id, int virtual_register)
      : UnallocatedOperand(FIXED_REGISTER, reg_id, virtual_register) {
    value_ |= HasSecondaryStorageField::encode(true);
    value_ |= SecondaryStorageField::encode(slot_id);
  }

  // Copy of |other| rebound to a different (valid) virtual register.
  UnallocatedOperand(const UnallocatedOperand& other, int virtual_register) {
    DCHECK_NE(kInvalidVirtualRegister, virtual_register);
    value_ = VirtualRegisterField::update(
        other.value_, static_cast<uint32_t>(virtual_register));
  }

  // Predicates for the operand policy.
  bool HasRegisterOrSlotPolicy() const {
    return basic_policy() == EXTENDED_POLICY &&
           extended_policy() == REGISTER_OR_SLOT;
  }
  bool HasRegisterOrSlotOrConstantPolicy() const {
    return basic_policy() == EXTENDED_POLICY &&
           extended_policy() == REGISTER_OR_SLOT_OR_CONSTANT;
  }
  // NOTE: relies on short-circuiting — extended_policy() may only be called
  // when basic_policy() != FIXED_SLOT (it DCHECKs EXTENDED_POLICY).
  bool HasFixedPolicy() const {
    return basic_policy() == FIXED_SLOT ||
           extended_policy() == FIXED_REGISTER ||
           extended_policy() == FIXED_FP_REGISTER;
  }
  bool HasRegisterPolicy() const {
    return basic_policy() == EXTENDED_POLICY &&
           extended_policy() == MUST_HAVE_REGISTER;
  }
  bool HasSlotPolicy() const {
    return basic_policy() == EXTENDED_POLICY &&
           extended_policy() == MUST_HAVE_SLOT;
  }
  bool HasSameAsInputPolicy() const {
    return basic_policy() == EXTENDED_POLICY &&
           extended_policy() == SAME_AS_FIRST_INPUT;
  }
  bool HasFixedSlotPolicy() const { return basic_policy() == FIXED_SLOT; }
  bool HasFixedRegisterPolicy() const {
    return basic_policy() == EXTENDED_POLICY &&
           extended_policy() == FIXED_REGISTER;
  }
  bool HasFixedFPRegisterPolicy() const {
    return basic_policy() == EXTENDED_POLICY &&
           extended_policy() == FIXED_FP_REGISTER;
  }
  bool HasSecondaryStorage() const {
    return basic_policy() == EXTENDED_POLICY &&
           extended_policy() == FIXED_REGISTER &&
           HasSecondaryStorageField::decode(value_);
  }
  int GetSecondaryStorage() const {
    DCHECK(HasSecondaryStorage());
    return SecondaryStorageField::decode(value_);
  }

  // [basic_policy]: Distinguish between FIXED_SLOT and all other policies.
  BasicPolicy basic_policy() const { return BasicPolicyField::decode(value_); }

  // [extended_policy]: Only for non-FIXED_SLOT. The finer-grained policy.
  ExtendedPolicy extended_policy() const {
    DCHECK(basic_policy() == EXTENDED_POLICY);
    return ExtendedPolicyField::decode(value_);
  }

  // [fixed_slot_index]: Only for FIXED_SLOT. Arithmetic right-shift restores
  // the sign of negative slot indices (see the FIXED_SLOT constructor).
  int fixed_slot_index() const {
    DCHECK(HasFixedSlotPolicy());
    return static_cast<int>(static_cast<int64_t>(value_) >>
                            FixedSlotIndexField::kShift);
  }

  // [fixed_register_index]: Only for FIXED_REGISTER or FIXED_FP_REGISTER.
  int fixed_register_index() const {
    DCHECK(HasFixedRegisterPolicy() || HasFixedFPRegisterPolicy());
    return FixedRegisterField::decode(value_);
  }

  // [virtual_register]: The virtual register ID for this operand.
  int32_t virtual_register() const {
    return static_cast<int32_t>(VirtualRegisterField::decode(value_));
  }

  // [lifetime]: Only for non-FIXED_SLOT.
  bool IsUsedAtStart() const {
    DCHECK(basic_policy() == EXTENDED_POLICY);
    return LifetimeField::decode(value_) == USED_AT_START;
  }

  INSTRUCTION_OPERAND_CASTS(UnallocatedOperand, UNALLOCATED)

  // The encoding used for UnallocatedOperand operands depends on the policy
  // that is stored within the operand. The FIXED_SLOT policy uses a compact
  // encoding because it accommodates a larger pay-load.
  //
  // For FIXED_SLOT policy:
  //     +------------------------------------------------+
  //     |日      slot_index   | 0 | virtual_register | 001 |
  //     +------------------------------------------------+
  //
  // For all other (extended) policies:
  //     +-----------------------------------------------------+
  //     |  reg_index  | L | PPP |  1 | virtual_register | 001 |
  //     +-----------------------------------------------------+
  //     L ... Lifetime
  //     P ... Policy
  //
  // The slot index is a signed value which requires us to decode it manually
  // instead of using the BitField utility class.

  STATIC_ASSERT(KindField::kSize == 3);

  // Bits 3-34: virtual register (common to all unallocated operands).
  class VirtualRegisterField : public BitField64<uint32_t, 3, 32> {};

  // BitFields for all unallocated operands.
  class BasicPolicyField : public BitField64<BasicPolicy, 35, 1> {};

  // BitFields specific to BasicPolicy::FIXED_SLOT.
  class FixedSlotIndexField : public BitField64<int, 36, 28> {};

  // BitFields specific to BasicPolicy::EXTENDED_POLICY. These overlap
  // FixedSlotIndexField, which is why they are only valid for
  // EXTENDED_POLICY operands.
  class ExtendedPolicyField : public BitField64<ExtendedPolicy, 36, 3> {};
  class LifetimeField : public BitField64<Lifetime, 39, 1> {};
  class HasSecondaryStorageField : public BitField64<bool, 40, 1> {};
  class FixedRegisterField : public BitField64<int, 41, 6> {};
  class SecondaryStorageField : public BitField64<int, 47, 3> {};

 private:
  // Common base step for all constructors: tag as UNALLOCATED and store the
  // virtual register.
  explicit UnallocatedOperand(int virtual_register)
      : InstructionOperand(UNALLOCATED) {
    value_ |=
        VirtualRegisterField::encode(static_cast<uint32_t>(virtual_register));
  }
};
356
// Operand referring to a compile-time constant by virtual register.
// Constants take part in register allocation but are rematerialized from
// their constant value instead of being reloaded from a spill slot (see the
// predicate comments on InstructionOperand).
class ConstantOperand : public InstructionOperand {
 public:
  explicit ConstantOperand(int virtual_register)
      : InstructionOperand(CONSTANT) {
    value_ |=
        VirtualRegisterField::encode(static_cast<uint32_t>(virtual_register));
  }

  // The virtual register identifying this constant.
  int32_t virtual_register() const {
    return static_cast<int32_t>(VirtualRegisterField::decode(value_));
  }

  // Zone-allocating factory.
  static ConstantOperand* New(Zone* zone, int virtual_register) {
    return InstructionOperand::New(zone, ConstantOperand(virtual_register));
  }

  INSTRUCTION_OPERAND_CASTS(ConstantOperand, CONSTANT)

  STATIC_ASSERT(KindField::kSize == 3);
  class VirtualRegisterField : public BitField64<uint32_t, 3, 32> {};
};
378
// Operand carrying an immediate: the 32-bit payload is either the value
// itself (INLINE) or an index (INDEXED). NOTE(review): INDEXED presumably
// indexes an immediates table owned by the instruction sequence — confirm
// against the consumers of indexed_value().
class ImmediateOperand : public InstructionOperand {
 public:
  enum ImmediateType { INLINE, INDEXED };

  explicit ImmediateOperand(ImmediateType type, int32_t value)
      : InstructionOperand(IMMEDIATE) {
    value_ |= TypeField::encode(type);
    // Shift-based store (not ValueField::encode) preserves the sign of
    // negative values; decoding below uses an arithmetic right-shift.
    value_ |= static_cast<uint64_t>(static_cast<int64_t>(value))
              << ValueField::kShift;
  }

  ImmediateType type() const { return TypeField::decode(value_); }

  int32_t inline_value() const {
    DCHECK_EQ(INLINE, type());
    return static_cast<int64_t>(value_) >> ValueField::kShift;
  }

  int32_t indexed_value() const {
    DCHECK_EQ(INDEXED, type());
    return static_cast<int64_t>(value_) >> ValueField::kShift;
  }

  // Zone-allocating factory.
  static ImmediateOperand* New(Zone* zone, ImmediateType type, int32_t value) {
    return InstructionOperand::New(zone, ImmediateOperand(type, value));
  }

  INSTRUCTION_OPERAND_CASTS(ImmediateOperand, IMMEDIATE)

  STATIC_ASSERT(KindField::kSize == 3);
  class TypeField : public BitField64<ImmediateType, 3, 1> {};
  // The high 32 bits hold the value/index payload.
  class ValueField : public BitField64<int32_t, 32, 32> {};
};
412
// Operand naming a concrete machine location — a register or a stack slot —
// together with the MachineRepresentation of the value stored there. Base
// class of ExplicitOperand and AllocatedOperand.
class LocationOperand : public InstructionOperand {
 public:
  enum LocationKind { REGISTER, STACK_SLOT };

  // |index| is a register code (>= 0) for REGISTER locations, or a possibly
  // negative slot index for STACK_SLOT locations.
  LocationOperand(InstructionOperand::Kind operand_kind,
                  LocationOperand::LocationKind location_kind,
                  MachineRepresentation rep, int index)
      : InstructionOperand(operand_kind) {
    DCHECK_IMPLIES(location_kind == REGISTER, index >= 0);
    DCHECK(IsSupportedRepresentation(rep));
    value_ |= LocationKindField::encode(location_kind);
    value_ |= RepresentationField::encode(rep);
    // Shift-based store (not IndexField::encode) preserves the sign of
    // negative slot indices; decoding uses an arithmetic right-shift below.
    value_ |= static_cast<uint64_t>(static_cast<int64_t>(index))
              << IndexField::kShift;
  }

  // Slot index; only valid for stack-slot locations.
  int index() const {
    DCHECK(IsStackSlot() || IsFPStackSlot());
    return static_cast<int64_t>(value_) >> IndexField::kShift;
  }

  // Register code; only valid for register locations.
  int register_code() const {
    DCHECK(IsRegister() || IsFPRegister());
    return static_cast<int64_t>(value_) >> IndexField::kShift;
  }

  Register GetRegister() const {
    DCHECK(IsRegister());
    return Register::from_code(register_code());
  }

  FloatRegister GetFloatRegister() const {
    DCHECK(IsFloatRegister());
    return FloatRegister::from_code(register_code());
  }

  DoubleRegister GetDoubleRegister() const {
    // On platforms where FloatRegister, DoubleRegister, and Simd128Register
    // are all the same type, it's convenient to treat everything as a
    // DoubleRegister, so be lax about type checking here.
    DCHECK(IsFPRegister());
    return DoubleRegister::from_code(register_code());
  }

  Simd128Register GetSimd128Register() const {
    DCHECK(IsSimd128Register());
    return Simd128Register::from_code(register_code());
  }

  LocationKind location_kind() const {
    return LocationKindField::decode(value_);
  }

  MachineRepresentation representation() const {
    return RepresentationField::decode(value_);
  }

  // Representations that may live in a location operand; sub-word and
  // untyped representations are rejected.
  static bool IsSupportedRepresentation(MachineRepresentation rep) {
    switch (rep) {
      case MachineRepresentation::kWord32:
      case MachineRepresentation::kWord64:
      case MachineRepresentation::kFloat32:
      case MachineRepresentation::kFloat64:
      case MachineRepresentation::kSimd128:
      case MachineRepresentation::kTaggedSigned:
      case MachineRepresentation::kTaggedPointer:
      case MachineRepresentation::kTagged:
      case MachineRepresentation::kCompressedSigned:
      case MachineRepresentation::kCompressedPointer:
      case MachineRepresentation::kCompressed:
        return true;
      case MachineRepresentation::kBit:
      case MachineRepresentation::kWord8:
      case MachineRepresentation::kWord16:
      case MachineRepresentation::kNone:
        return false;
    }
    UNREACHABLE();
  }

  // Return true if the locations can be moved to one another.
  bool IsCompatible(LocationOperand* op);

  // Hand-written casts (instead of INSTRUCTION_OPERAND_CASTS) because a
  // LocationOperand may be of either EXPLICIT or ALLOCATED kind.
  static LocationOperand* cast(InstructionOperand* op) {
    DCHECK(op->IsAnyLocationOperand());
    return static_cast<LocationOperand*>(op);
  }

  static const LocationOperand* cast(const InstructionOperand* op) {
    DCHECK(op->IsAnyLocationOperand());
    return static_cast<const LocationOperand*>(op);
  }

  static LocationOperand cast(const InstructionOperand& op) {
    DCHECK(op.IsAnyLocationOperand());
    return *static_cast<const LocationOperand*>(&op);
  }

  STATIC_ASSERT(KindField::kSize == 3);
  class LocationKindField : public BitField64<LocationKind, 3, 2> {};
  class RepresentationField : public BitField64<MachineRepresentation, 5, 8> {};
  // Signed payload; see the shift-based encode/decode above.
  class IndexField : public BitField64<int32_t, 35, 29> {};
};
516
// A LocationOperand naming a fixed register or stack slot directly, created
// by the instruction selector to bypass the register allocator; never
// associated with a virtual register. (Constructor is defined out-of-line.)
class V8_EXPORT_PRIVATE ExplicitOperand
    : public NON_EXPORTED_BASE(LocationOperand) {
 public:
  ExplicitOperand(LocationKind kind, MachineRepresentation rep, int index);

  // Zone-allocating factory.
  static ExplicitOperand* New(Zone* zone, LocationKind kind,
                              MachineRepresentation rep, int index) {
    return InstructionOperand::New(zone, ExplicitOperand(kind, rep, index));
  }

  INSTRUCTION_OPERAND_CASTS(ExplicitOperand, EXPLICIT)
};
529
// A LocationOperand produced by the register allocator; always associated
// with a virtual register.
class AllocatedOperand : public LocationOperand {
 public:
  AllocatedOperand(LocationKind kind, MachineRepresentation rep, int index)
      : LocationOperand(ALLOCATED, kind, rep, index) {}

  // Zone-allocating factory.
  static AllocatedOperand* New(Zone* zone, LocationKind kind,
                               MachineRepresentation rep, int index) {
    return InstructionOperand::New(zone, AllocatedOperand(kind, rep, index));
  }

  INSTRUCTION_OPERAND_CASTS(AllocatedOperand, ALLOCATED)
};
542
543#undef INSTRUCTION_OPERAND_CASTS
544
545bool InstructionOperand::IsAnyLocationOperand() const {
546 return this->kind() >= FIRST_LOCATION_OPERAND_KIND;
547}
548
549bool InstructionOperand::IsLocationOperand() const {
550 return IsAnyLocationOperand() &&
551 !IsFloatingPoint(LocationOperand::cast(this)->representation());
552}
553
554bool InstructionOperand::IsFPLocationOperand() const {
555 return IsAnyLocationOperand() &&
556 IsFloatingPoint(LocationOperand::cast(this)->representation());
557}
558
559bool InstructionOperand::IsAnyRegister() const {
560 return IsAnyLocationOperand() &&
561 LocationOperand::cast(this)->location_kind() ==
562 LocationOperand::REGISTER;
563}
564
565bool InstructionOperand::IsRegister() const {
566 return IsAnyRegister() &&
567 !IsFloatingPoint(LocationOperand::cast(this)->representation());
568}
569
570bool InstructionOperand::IsFPRegister() const {
571 return IsAnyRegister() &&
572 IsFloatingPoint(LocationOperand::cast(this)->representation());
573}
574
575bool InstructionOperand::IsFloatRegister() const {
576 return IsAnyRegister() && LocationOperand::cast(this)->representation() ==
577 MachineRepresentation::kFloat32;
578}
579
580bool InstructionOperand::IsDoubleRegister() const {
581 return IsAnyRegister() && LocationOperand::cast(this)->representation() ==
582 MachineRepresentation::kFloat64;
583}
584
585bool InstructionOperand::IsSimd128Register() const {
586 return IsAnyRegister() && LocationOperand::cast(this)->representation() ==
587 MachineRepresentation::kSimd128;
588}
589
590bool InstructionOperand::IsAnyStackSlot() const {
591 return IsAnyLocationOperand() &&
592 LocationOperand::cast(this)->location_kind() ==
593 LocationOperand::STACK_SLOT;
594}
595
596bool InstructionOperand::IsStackSlot() const {
597 return IsAnyStackSlot() &&
598 !IsFloatingPoint(LocationOperand::cast(this)->representation());
599}
600
601bool InstructionOperand::IsFPStackSlot() const {
602 return IsAnyStackSlot() &&
603 IsFloatingPoint(LocationOperand::cast(this)->representation());
604}
605
606bool InstructionOperand::IsFloatStackSlot() const {
607 return IsAnyLocationOperand() &&
608 LocationOperand::cast(this)->location_kind() ==
609 LocationOperand::STACK_SLOT &&
610 LocationOperand::cast(this)->representation() ==
611 MachineRepresentation::kFloat32;
612}
613
614bool InstructionOperand::IsDoubleStackSlot() const {
615 return IsAnyLocationOperand() &&
616 LocationOperand::cast(this)->location_kind() ==
617 LocationOperand::STACK_SLOT &&
618 LocationOperand::cast(this)->representation() ==
619 MachineRepresentation::kFloat64;
620}
621
622bool InstructionOperand::IsSimd128StackSlot() const {
623 return IsAnyLocationOperand() &&
624 LocationOperand::cast(this)->location_kind() ==
625 LocationOperand::STACK_SLOT &&
626 LocationOperand::cast(this)->representation() ==
627 MachineRepresentation::kSimd128;
628}
629
// Maps equivalent operands onto one canonical 64-bit value, so that
// operands naming the same physical location compare equal regardless of
// their kind (EXPLICIT vs. ALLOCATED) and — under simple FP aliasing —
// regardless of their exact FP representation.
uint64_t InstructionOperand::GetCanonicalizedValue() const {
  if (IsAnyLocationOperand()) {
    MachineRepresentation canonical = MachineRepresentation::kNone;
    if (IsFPRegister()) {
      if (kSimpleFPAliasing) {
        // We treat all FP register operands the same for simple aliasing.
        canonical = MachineRepresentation::kFloat64;
      } else {
        // We need to distinguish FP register operands of different reps when
        // aliasing is not simple (e.g. ARM).
        canonical = LocationOperand::cast(this)->representation();
      }
    }
    // Normalize the kind to EXPLICIT so EXPLICIT and ALLOCATED operands
    // denoting the same location canonicalize identically.
    return InstructionOperand::KindField::update(
        LocationOperand::RepresentationField::update(this->value_, canonical),
        LocationOperand::EXPLICIT);
  }
  return this->value_;
}
649
650// Required for maps that don't care about machine type.
// Strict-weak-ordering functor over canonicalized operand values, for use as
// the comparator of std::map/std::set keyed by InstructionOperand when the
// machine representation should not distinguish keys.
struct CompareOperandModuloType {
  bool operator()(const InstructionOperand& a,
                  const InstructionOperand& b) const {
    return a.CompareCanonicalized(b);
  }
};
657
// A single source -> destination operand move, zone-allocated and owned by a
// ParallelMove. The gap resolver mutates these in place while it schedules
// moves; see IsPending() and Eliminate() for the two sentinel states.
class V8_EXPORT_PRIVATE MoveOperands final
    : public NON_EXPORTED_BASE(ZoneObject) {
 public:
  // Both operands must be valid (non-INVALID) on construction.
  MoveOperands(const InstructionOperand& source,
               const InstructionOperand& destination)
      : source_(source), destination_(destination) {
    DCHECK(!source.IsInvalid() && !destination.IsInvalid());
  }

  const InstructionOperand& source() const { return source_; }
  InstructionOperand& source() { return source_; }
  void set_source(const InstructionOperand& operand) { source_ = operand; }

  const InstructionOperand& destination() const { return destination_; }
  InstructionOperand& destination() { return destination_; }
  void set_destination(const InstructionOperand& operand) {
    destination_ = operand;
  }

  // The gap resolver marks moves as "in-progress" by clearing the
  // destination (but not the source).
  bool IsPending() const {
    return destination_.IsInvalid() && !source_.IsInvalid();
  }
  void SetPending() { destination_ = InstructionOperand(); }

  // A move is redundant if it's been eliminated or if its source and
  // destination are the same (compared modulo kind/representation).
  bool IsRedundant() const {
    DCHECK_IMPLIES(!destination_.IsInvalid(), !destination_.IsConstant());
    return IsEliminated() || source_.EqualsCanonicalized(destination_);
  }

  // We clear both operands to indicate move that's been eliminated.
  void Eliminate() { source_ = destination_ = InstructionOperand(); }
  bool IsEliminated() const {
    // Invariant: a cleared source implies a cleared destination, which keeps
    // the eliminated state distinguishable from the pending state.
    DCHECK_IMPLIES(source_.IsInvalid(), destination_.IsInvalid());
    return source_.IsInvalid();
  }

  // APIs to aid debugging. For general-stream APIs, use operator<<.
  void Print() const;

 private:
  InstructionOperand source_;
  InstructionOperand destination_;

  DISALLOW_COPY_AND_ASSIGN(MoveOperands);
};
707
708V8_EXPORT_PRIVATE std::ostream& operator<<(std::ostream&, const MoveOperands&);
709
// An ordered collection of MoveOperands attached to an instruction gap
// position; zone-allocated, and itself a ZoneVector of its moves.
class V8_EXPORT_PRIVATE ParallelMove final
    : public NON_EXPORTED_BASE(ZoneVector<MoveOperands*>),
      public NON_EXPORTED_BASE(ZoneObject) {
 public:
  explicit ParallelMove(Zone* zone) : ZoneVector<MoveOperands*>(zone) {}

  // Convenience overload that allocates the MoveOperands in this vector's
  // own zone.
  MoveOperands* AddMove(const InstructionOperand& from,
                        const InstructionOperand& to) {
    Zone* zone = get_allocator().zone();
    return AddMove(from, to, zone);
  }

  // Adds a move unless it is a canonical self-move (from == to modulo
  // kind/representation), in which case nothing is added and nullptr is
  // returned.
  MoveOperands* AddMove(const InstructionOperand& from,
                        const InstructionOperand& to,
                        Zone* operand_allocation_zone) {
    if (from.EqualsCanonicalized(to)) return nullptr;
    MoveOperands* move = new (operand_allocation_zone) MoveOperands(from, to);
    // Reserve a small initial capacity on the first insertion.
    if (empty()) reserve(4);
    push_back(move);
    return move;
  }

  // Defined out-of-line; see MoveOperands::IsRedundant() for the per-move
  // notion of redundancy.
  bool IsRedundant() const;

  // Prepare this ParallelMove to insert move as if it happened in a subsequent
  // ParallelMove. move->source() may be changed. Any MoveOperands added to
  // to_eliminate must be Eliminated.
  void PrepareInsertAfter(MoveOperands* move,
                          ZoneVector<MoveOperands*>* to_eliminate) const;

 private:
  DISALLOW_COPY_AND_ASSIGN(ParallelMove);
};
743
744std::ostream& operator<<(std::ostream&, const ParallelMove&);
745
// Collects AllocatedOperands recorded for a single instruction position.
// NOTE(review): judging by the name and RecordReference(), these are the
// operands holding tagged references live at that position — confirm
// against the consumers of reference_operands().
class ReferenceMap final : public ZoneObject {
 public:
  // The instruction position starts unset (-1) until
  // set_instruction_position() is called exactly once.
  // NOTE(review): verify whether ZoneVector's (8, zone) constructor reserves
  // capacity or creates 8 default elements.
  explicit ReferenceMap(Zone* zone)
      : reference_operands_(8, zone), instruction_position_(-1) {}

  const ZoneVector<InstructionOperand>& reference_operands() const {
    return reference_operands_;
  }
  int instruction_position() const { return instruction_position_; }

  // May only be called once per map (DCHECKed).
  void set_instruction_position(int pos) {
    DCHECK_EQ(-1, instruction_position_);
    instruction_position_ = pos;
  }

  void RecordReference(const AllocatedOperand& op);

 private:
  friend std::ostream& operator<<(std::ostream&, const ReferenceMap&);

  ZoneVector<InstructionOperand> reference_operands_;
  int instruction_position_;
};
769
770std::ostream& operator<<(std::ostream&, const ReferenceMap&);
771
772class InstructionBlock;
773
774class V8_EXPORT_PRIVATE Instruction final {
775 public:
776 size_t OutputCount() const { return OutputCountField::decode(bit_field_); }
777 const InstructionOperand* OutputAt(size_t i) const {
778 DCHECK(i < OutputCount());
779 return &operands_[i];
780 }
781 InstructionOperand* OutputAt(size_t i) {
782 DCHECK(i < OutputCount());
783 return &operands_[i];
784 }
785
786 bool HasOutput() const { return OutputCount() > 0; }
787 const InstructionOperand* Output() const { return OutputAt(0); }
788 InstructionOperand* Output() { return OutputAt(0); }
789
790 size_t InputCount() const { return InputCountField::decode(bit_field_); }
791 const InstructionOperand* InputAt(size_t i) const {
792 DCHECK(i < InputCount());
793 return &operands_[OutputCount() + i];
794 }
795 InstructionOperand* InputAt(size_t i) {
796 DCHECK(i < InputCount());
797 return &operands_[OutputCount() + i];
798 }
799
800 size_t TempCount() const { return TempCountField::decode(bit_field_); }
801 const InstructionOperand* TempAt(size_t i) const {
802 DCHECK(i < TempCount());
803 return &operands_[OutputCount() + InputCount() + i];
804 }
805 InstructionOperand* TempAt(size_t i) {
806 DCHECK(i < TempCount());
807 return &operands_[OutputCount() + InputCount() + i];
808 }
809
810 InstructionCode opcode() const { return opcode_; }
811 ArchOpcode arch_opcode() const { return ArchOpcodeField::decode(opcode()); }
812 AddressingMode addressing_mode() const {
813 return AddressingModeField::decode(opcode());
814 }
815 FlagsMode flags_mode() const { return FlagsModeField::decode(opcode()); }
816 FlagsCondition flags_condition() const {
817 return FlagsConditionField::decode(opcode());
818 }
819
820 static Instruction* New(Zone* zone, InstructionCode opcode) {
821 return New(zone, opcode, 0, nullptr, 0, nullptr, 0, nullptr);
822 }
823
824 static Instruction* New(Zone* zone, InstructionCode opcode,
825 size_t output_count, InstructionOperand* outputs,
826 size_t input_count, InstructionOperand* inputs,
827 size_t temp_count, InstructionOperand* temps) {
828 DCHECK_LE(0, opcode);
829 DCHECK(output_count == 0 || outputs != nullptr);
830 DCHECK(input_count == 0 || inputs != nullptr);
831 DCHECK(temp_count == 0 || temps != nullptr);
832 // TODO(jarin/mstarzinger): Handle this gracefully. See crbug.com/582702.
833 CHECK(InputCountField::is_valid(input_count));
834
835 size_t total_extra_ops = output_count + input_count + temp_count;
836 if (total_extra_ops != 0) total_extra_ops--;
837 int size = static_cast<int>(
838 RoundUp(sizeof(Instruction), sizeof(InstructionOperand)) +
839 total_extra_ops * sizeof(InstructionOperand));
840 return new (zone->New(size)) Instruction(
841 opcode, output_count, outputs, input_count, inputs, temp_count, temps);
842 }
843
844 Instruction* MarkAsCall() {
845 bit_field_ = IsCallField::update(bit_field_, true);
846 return this;
847 }
848 bool IsCall() const { return IsCallField::decode(bit_field_); }
849 bool NeedsReferenceMap() const { return IsCall(); }
850 bool HasReferenceMap() const { return reference_map_ != nullptr; }
851
852 bool ClobbersRegisters() const { return IsCall(); }
853 bool ClobbersTemps() const { return IsCall(); }
854 bool ClobbersDoubleRegisters() const { return IsCall(); }
855 ReferenceMap* reference_map() const { return reference_map_; }
856
857 void set_reference_map(ReferenceMap* map) {
858 DCHECK(NeedsReferenceMap());
859 DCHECK(!reference_map_);
860 reference_map_ = map;
861 }
862
863 void OverwriteWithNop() {
864 opcode_ = ArchOpcodeField::encode(kArchNop);
865 bit_field_ = 0;
866 reference_map_ = nullptr;
867 }
868
869 bool IsNop() const { return arch_opcode() == kArchNop; }
870
871 bool IsDeoptimizeCall() const {
872 return arch_opcode() == ArchOpcode::kArchDeoptimize ||
873 FlagsModeField::decode(opcode()) == kFlags_deoptimize ||
874 FlagsModeField::decode(opcode()) == kFlags_deoptimize_and_poison;
875 }
876
877 bool IsTrap() const {
878 return FlagsModeField::decode(opcode()) == kFlags_trap;
879 }
880
881 bool IsJump() const { return arch_opcode() == ArchOpcode::kArchJmp; }
882 bool IsRet() const { return arch_opcode() == ArchOpcode::kArchRet; }
883 bool IsTailCall() const {
884 return arch_opcode() == ArchOpcode::kArchTailCallCodeObject ||
885 arch_opcode() == ArchOpcode::kArchTailCallCodeObjectFromJSFunction ||
886 arch_opcode() == ArchOpcode::kArchTailCallAddress ||
887 arch_opcode() == ArchOpcode::kArchTailCallWasm;
888 }
889 bool IsThrow() const {
890 return arch_opcode() == ArchOpcode::kArchThrowTerminator;
891 }
892
  // Positions around this instruction at which parallel (gap) moves can be
  // attached.
  enum GapPosition {
    START,
    END,
    FIRST_GAP_POSITION = START,
    LAST_GAP_POSITION = END
  };

  // Returns the parallel move at |pos|, allocating an empty one in |zone| on
  // first use.
  ParallelMove* GetOrCreateParallelMove(GapPosition pos, Zone* zone) {
    if (parallel_moves_[pos] == nullptr) {
      parallel_moves_[pos] = new (zone) ParallelMove(zone);
    }
    return parallel_moves_[pos];
  }

  // Returns the parallel move at |pos|, or nullptr if none was created.
  ParallelMove* GetParallelMove(GapPosition pos) {
    return parallel_moves_[pos];
  }

  const ParallelMove* GetParallelMove(GapPosition pos) const {
    return parallel_moves_[pos];
  }

  bool AreMovesRedundant() const;

  // Raw access to both gap-move slots, indexed by GapPosition.
  ParallelMove* const* parallel_moves() const { return &parallel_moves_[0]; }
  ParallelMove** parallel_moves() { return &parallel_moves_[0]; }

  // The block_id may be invalidated in JumpThreading. It is only important for
  // register allocation, to avoid searching for blocks from instruction
  // indexes.
  InstructionBlock* block() const { return block_; }
  void set_block(InstructionBlock* block) {
    DCHECK_NOT_NULL(block);
    block_ = block;
  }

  // APIs to aid debugging. For general-stream APIs, use operator<<.
  void Print() const;

  // Layout of bit_field_: output count in bits [0,8), input count in
  // [8,24), temp count in [24,30); bit 30 is the is-call flag (IsCallField,
  // declared in the private section).
  using OutputCountField = BitField<size_t, 0, 8>;
  using InputCountField = BitField<size_t, 8, 16>;
  using TempCountField = BitField<size_t, 24, 6>;

  static const size_t kMaxOutputCount = OutputCountField::kMax;
  static const size_t kMaxInputCount = InputCountField::kMax;
  static const size_t kMaxTempCount = TempCountField::kMax;
939
 private:
  // Out-of-line constructors; instances are created via the public factories.
  explicit Instruction(InstructionCode opcode);

  Instruction(InstructionCode opcode, size_t output_count,
              InstructionOperand* outputs, size_t input_count,
              InstructionOperand* inputs, size_t temp_count,
              InstructionOperand* temps);

  // Shares bit_field_ with the count fields declared above (occupies bit 30).
  using IsCallField = BitField<bool, 30, 1>;

  InstructionCode opcode_;
  uint32_t bit_field_;               // Packed operand counts and call flag.
  ParallelMove* parallel_moves_[2];  // Indexed by GapPosition.
  ReferenceMap* reference_map_;
  InstructionBlock* block_;
  // NOTE(review): looks like the trailing-array idiom — outputs, inputs and
  // temps appear to be stored inline past the end of the object; confirm
  // against the allocation/factory code (outside this excerpt).
  InstructionOperand operands_[1];

  DISALLOW_COPY_AND_ASSIGN(Instruction);
};
959
960std::ostream& operator<<(std::ostream&, const Instruction&);
961
962class RpoNumber final {
963 public:
964 static const int kInvalidRpoNumber = -1;
965 int ToInt() const {
966 DCHECK(IsValid());
967 return index_;
968 }
969 size_t ToSize() const {
970 DCHECK(IsValid());
971 return static_cast<size_t>(index_);
972 }
973 bool IsValid() const { return index_ >= 0; }
974 static RpoNumber FromInt(int index) { return RpoNumber(index); }
975 static RpoNumber Invalid() { return RpoNumber(kInvalidRpoNumber); }
976
977 bool IsNext(const RpoNumber other) const {
978 DCHECK(IsValid());
979 return other.index_ == this->index_ + 1;
980 }
981
982 RpoNumber Next() const {
983 DCHECK(IsValid());
984 return RpoNumber(index_ + 1);
985 }
986
987 // Comparison operators.
988 bool operator==(RpoNumber other) const { return index_ == other.index_; }
989 bool operator!=(RpoNumber other) const { return index_ != other.index_; }
990 bool operator>(RpoNumber other) const { return index_ > other.index_; }
991 bool operator<(RpoNumber other) const { return index_ < other.index_; }
992 bool operator<=(RpoNumber other) const { return index_ <= other.index_; }
993 bool operator>=(RpoNumber other) const { return index_ >= other.index_; }
994
995 private:
996 explicit RpoNumber(int32_t index) : index_(index) {}
997 int32_t index_;
998};
999
1000std::ostream& operator<<(std::ostream&, const RpoNumber&);
1001
// A constant value referenced by instructions. The 64-bit value_ payload is
// reinterpreted according to type_; floating-point values are stored as raw
// bit patterns.
class V8_EXPORT_PRIVATE Constant final {
 public:
  enum Type {
    kInt32,
    kInt64,
    kFloat32,
    kFloat64,
    kExternalReference,
    kHeapObject,
    kRpoNumber,
    kDelayedStringConstant
  };

  explicit Constant(int32_t v);
  explicit Constant(int64_t v) : type_(kInt64), value_(v) {}
  explicit Constant(float v) : type_(kFloat32), value_(bit_cast<int32_t>(v)) {}
  explicit Constant(double v) : type_(kFloat64), value_(bit_cast<int64_t>(v)) {}
  explicit Constant(ExternalReference ref)
      : type_(kExternalReference), value_(bit_cast<intptr_t>(ref.address())) {}
  explicit Constant(Handle<HeapObject> obj)
      : type_(kHeapObject), value_(bit_cast<intptr_t>(obj)) {}
  explicit Constant(RpoNumber rpo) : type_(kRpoNumber), value_(rpo.ToInt()) {}
  explicit Constant(const StringConstantBase* str)
      : type_(kDelayedStringConstant), value_(bit_cast<intptr_t>(str)) {}
  explicit Constant(RelocatablePtrConstantInfo info);

  Type type() const { return type_; }

  RelocInfo::Mode rmode() const { return rmode_; }

  // Also allowed on kInt64 constants, provided the value fits in 32 bits
  // (checked in debug mode).
  int32_t ToInt32() const {
    DCHECK(type() == kInt32 || type() == kInt64);
    const int32_t value = static_cast<int32_t>(value_);
    DCHECK_EQ(value_, static_cast<int64_t>(value));
    return value;
  }

  int64_t ToInt64() const {
    if (type() == kInt32) return ToInt32();
    DCHECK_EQ(kInt64, type());
    return value_;
  }

  float ToFloat32() const {
    // TODO(ahaas): We should remove this function. If value_ has the bit
    // representation of a signalling NaN, then returning it as float can cause
    // the signalling bit to flip, and value_ is returned as a quiet NaN.
    DCHECK_EQ(kFloat32, type());
    return bit_cast<float>(static_cast<int32_t>(value_));
  }

  // Raw bit pattern of a kFloat32 constant; preserves NaN bits, unlike
  // ToFloat32() (see TODO above).
  uint32_t ToFloat32AsInt() const {
    DCHECK_EQ(kFloat32, type());
    return bit_cast<uint32_t>(static_cast<int32_t>(value_));
  }

  // Returned as a Double bit-pattern wrapper rather than a raw double.
  Double ToFloat64() const {
    DCHECK_EQ(kFloat64, type());
    return Double(bit_cast<uint64_t>(value_));
  }

  ExternalReference ToExternalReference() const {
    DCHECK_EQ(kExternalReference, type());
    return ExternalReference::FromRawAddress(static_cast<Address>(value_));
  }

  RpoNumber ToRpoNumber() const {
    DCHECK_EQ(kRpoNumber, type());
    return RpoNumber::FromInt(static_cast<int>(value_));
  }

  // Out-of-line accessors for the pointer-typed payloads.
  Handle<HeapObject> ToHeapObject() const;
  Handle<Code> ToCode() const;
  const StringConstantBase* ToDelayedStringConstant() const;

 private:
  Type type_;
  RelocInfo::Mode rmode_ = RelocInfo::NONE;
  int64_t value_;  // Interpreted according to type_.
};
1082
1083std::ostream& operator<<(std::ostream&, const Constant&);
1084
1085// Forward declarations.
1086class FrameStateDescriptor;
1087
// Kind tags for StateValueDescriptor entries (see below).
enum class StateValueKind : uint8_t {
  kArgumentsElements,
  kArgumentsLength,
  kPlain,          // An ordinary value with a MachineType.
  kOptimizedOut,   // No value available; it was optimized away.
  kNested,         // Refers to a nested StateValueList, identified by id.
  kDuplicate       // Duplicates another entry, identified by id.
};
1096
1097class StateValueDescriptor {
1098 public:
1099 StateValueDescriptor()
1100 : kind_(StateValueKind::kPlain), type_(MachineType::AnyTagged()) {}
1101
1102 static StateValueDescriptor ArgumentsElements(ArgumentsStateType type) {
1103 StateValueDescriptor descr(StateValueKind::kArgumentsElements,
1104 MachineType::AnyTagged());
1105 descr.args_type_ = type;
1106 return descr;
1107 }
1108 static StateValueDescriptor ArgumentsLength(ArgumentsStateType type) {
1109 StateValueDescriptor descr(StateValueKind::kArgumentsLength,
1110 MachineType::AnyTagged());
1111 descr.args_type_ = type;
1112 return descr;
1113 }
1114 static StateValueDescriptor Plain(MachineType type) {
1115 return StateValueDescriptor(StateValueKind::kPlain, type);
1116 }
1117 static StateValueDescriptor OptimizedOut() {
1118 return StateValueDescriptor(StateValueKind::kOptimizedOut,
1119 MachineType::AnyTagged());
1120 }
1121 static StateValueDescriptor Recursive(size_t id) {
1122 StateValueDescriptor descr(StateValueKind::kNested,
1123 MachineType::AnyTagged());
1124 descr.id_ = id;
1125 return descr;
1126 }
1127 static StateValueDescriptor Duplicate(size_t id) {
1128 StateValueDescriptor descr(StateValueKind::kDuplicate,
1129 MachineType::AnyTagged());
1130 descr.id_ = id;
1131 return descr;
1132 }
1133
1134 bool IsArgumentsElements() const {
1135 return kind_ == StateValueKind::kArgumentsElements;
1136 }
1137 bool IsArgumentsLength() const {
1138 return kind_ == StateValueKind::kArgumentsLength;
1139 }
1140 bool IsPlain() const { return kind_ == StateValueKind::kPlain; }
1141 bool IsOptimizedOut() const { return kind_ == StateValueKind::kOptimizedOut; }
1142 bool IsNested() const { return kind_ == StateValueKind::kNested; }
1143 bool IsDuplicate() const { return kind_ == StateValueKind::kDuplicate; }
1144 MachineType type() const { return type_; }
1145 size_t id() const {
1146 DCHECK(kind_ == StateValueKind::kDuplicate ||
1147 kind_ == StateValueKind::kNested);
1148 return id_;
1149 }
1150 ArgumentsStateType arguments_type() const {
1151 DCHECK(kind_ == StateValueKind::kArgumentsElements ||
1152 kind_ == StateValueKind::kArgumentsLength);
1153 return args_type_;
1154 }
1155
1156 private:
1157 StateValueDescriptor(StateValueKind kind, MachineType type)
1158 : kind_(kind), type_(type) {}
1159
1160 StateValueKind kind_;
1161 MachineType type_;
1162 union {
1163 size_t id_;
1164 ArgumentsStateType args_type_;
1165 };
1166};
1167
1168class StateValueList {
1169 public:
1170 explicit StateValueList(Zone* zone) : fields_(zone), nested_(zone) {}
1171
1172 size_t size() { return fields_.size(); }
1173
1174 struct Value {
1175 StateValueDescriptor* desc;
1176 StateValueList* nested;
1177
1178 Value(StateValueDescriptor* desc, StateValueList* nested)
1179 : desc(desc), nested(nested) {}
1180 };
1181
1182 class iterator {
1183 public:
1184 // Bare minimum of operators needed for range iteration.
1185 bool operator!=(const iterator& other) const {
1186 return field_iterator != other.field_iterator;
1187 }
1188 bool operator==(const iterator& other) const {
1189 return field_iterator == other.field_iterator;
1190 }
1191 iterator& operator++() {
1192 if (field_iterator->IsNested()) {
1193 nested_iterator++;
1194 }
1195 ++field_iterator;
1196 return *this;
1197 }
1198 Value operator*() {
1199 StateValueDescriptor* desc = &(*field_iterator);
1200 StateValueList* nested = desc->IsNested() ? *nested_iterator : nullptr;
1201 return Value(desc, nested);
1202 }
1203
1204 private:
1205 friend class StateValueList;
1206
1207 iterator(ZoneVector<StateValueDescriptor>::iterator it,
1208 ZoneVector<StateValueList*>::iterator nested)
1209 : field_iterator(it), nested_iterator(nested) {}
1210
1211 ZoneVector<StateValueDescriptor>::iterator field_iterator;
1212 ZoneVector<StateValueList*>::iterator nested_iterator;
1213 };
1214
1215 void ReserveSize(size_t size) { fields_.reserve(size); }
1216
1217 StateValueList* PushRecursiveField(Zone* zone, size_t id) {
1218 fields_.push_back(StateValueDescriptor::Recursive(id));
1219 StateValueList* nested =
1220 new (zone->New(sizeof(StateValueList))) StateValueList(zone);
1221 nested_.push_back(nested);
1222 return nested;
1223 }
1224 void PushArgumentsElements(ArgumentsStateType type) {
1225 fields_.push_back(StateValueDescriptor::ArgumentsElements(type));
1226 }
1227 void PushArgumentsLength(ArgumentsStateType type) {
1228 fields_.push_back(StateValueDescriptor::ArgumentsLength(type));
1229 }
1230 void PushDuplicate(size_t id) {
1231 fields_.push_back(StateValueDescriptor::Duplicate(id));
1232 }
1233 void PushPlain(MachineType type) {
1234 fields_.push_back(StateValueDescriptor::Plain(type));
1235 }
1236 void PushOptimizedOut() {
1237 fields_.push_back(StateValueDescriptor::OptimizedOut());
1238 }
1239
1240 iterator begin() { return iterator(fields_.begin(), nested_.begin()); }
1241 iterator end() { return iterator(fields_.end(), nested_.end()); }
1242
1243 private:
1244 ZoneVector<StateValueDescriptor> fields_;
1245 ZoneVector<StateValueList*> nested_;
1246};
1247
// Describes the shape of a frame state: frame type, bailout point, and the
// counts of parameters, locals and stack slots, with an optional link to an
// enclosing (outer) frame state.
class FrameStateDescriptor : public ZoneObject {
 public:
  FrameStateDescriptor(Zone* zone, FrameStateType type, BailoutId bailout_id,
                       OutputFrameStateCombine state_combine,
                       size_t parameters_count, size_t locals_count,
                       size_t stack_count,
                       MaybeHandle<SharedFunctionInfo> shared_info,
                       FrameStateDescriptor* outer_state = nullptr);

  FrameStateType type() const { return type_; }
  BailoutId bailout_id() const { return bailout_id_; }
  OutputFrameStateCombine state_combine() const { return frame_state_combine_; }
  size_t parameters_count() const { return parameters_count_; }
  size_t locals_count() const { return locals_count_; }
  size_t stack_count() const { return stack_count_; }
  MaybeHandle<SharedFunctionInfo> shared_info() const { return shared_info_; }
  // The enclosing frame state's descriptor, or nullptr if none.
  FrameStateDescriptor* outer_state() const { return outer_state_; }
  // Whether frames of this type carry a context value.
  bool HasContext() const {
    return FrameStateFunctionInfo::IsJSFunctionType(type_) ||
           type_ == FrameStateType::kBuiltinContinuation ||
           type_ == FrameStateType::kConstructStub;
  }

  // Size/frame-count queries; defined out of line.
  size_t GetSize() const;
  size_t GetTotalSize() const;
  size_t GetFrameCount() const;
  size_t GetJSFrameCount() const;

  StateValueList* GetStateValueDescriptors() { return &values_; }

  // Sentinel marker value (0xdead).
  static const int kImpossibleValue = 0xdead;

 private:
  FrameStateType type_;
  BailoutId bailout_id_;
  OutputFrameStateCombine frame_state_combine_;
  size_t parameters_count_;
  size_t locals_count_;
  size_t stack_count_;
  StateValueList values_;
  MaybeHandle<SharedFunctionInfo> const shared_info_;
  FrameStateDescriptor* outer_state_;  // nullptr for the outermost frame.
};
1291
// A deoptimization entry is a pair of the reason why we deoptimize and the
// frame state descriptor that we have to go back to.
class DeoptimizationEntry final {
 public:
  DeoptimizationEntry() = default;
  DeoptimizationEntry(FrameStateDescriptor* descriptor, DeoptimizeKind kind,
                      DeoptimizeReason reason, VectorSlotPair const& feedback)
      : descriptor_(descriptor),
        kind_(kind),
        reason_(reason),
        feedback_(feedback) {}

  // Frame state to reconstruct when this deopt is taken.
  FrameStateDescriptor* descriptor() const { return descriptor_; }
  DeoptimizeKind kind() const { return kind_; }
  DeoptimizeReason reason() const { return reason_; }
  VectorSlotPair const& feedback() const { return feedback_; }

 private:
  FrameStateDescriptor* descriptor_ = nullptr;
  DeoptimizeKind kind_ = DeoptimizeKind::kEager;
  DeoptimizeReason reason_ = DeoptimizeReason::kUnknown;
  VectorSlotPair feedback_ = VectorSlotPair();
};
1315
1316using DeoptimizationVector = ZoneVector<DeoptimizationEntry>;
1317
// A phi for the instruction graph: a virtual register whose value is chosen
// from the per-offset input virtual registers.
class V8_EXPORT_PRIVATE PhiInstruction final
    : public NON_EXPORTED_BASE(ZoneObject) {
 public:
  using Inputs = ZoneVector<InstructionOperand>;

  PhiInstruction(Zone* zone, int virtual_register, size_t input_count);

  // Defined out of line; |offset| indexes into operands().
  void SetInput(size_t offset, int virtual_register);
  void RenameInput(size_t offset, int virtual_register);

  int virtual_register() const { return virtual_register_; }
  // Input virtual registers, indexed by the |offset| used in SetInput.
  const IntVector& operands() const { return operands_; }

  // TODO(dcarney): this has no real business being here, since it's internal to
  // the register allocator, but putting it here was convenient.
  const InstructionOperand& output() const { return output_; }
  InstructionOperand& output() { return output_; }

 private:
  const int virtual_register_;
  InstructionOperand output_;
  IntVector operands_;
};
1341
1342// Analogue of BasicBlock for Instructions instead of Nodes.
1343class V8_EXPORT_PRIVATE InstructionBlock final
1344 : public NON_EXPORTED_BASE(ZoneObject) {
1345 public:
1346 InstructionBlock(Zone* zone, RpoNumber rpo_number, RpoNumber loop_header,
1347 RpoNumber loop_end, bool deferred, bool handler);
1348
1349 // Instruction indexes (used by the register allocator).
1350 int first_instruction_index() const {
1351 DCHECK_LE(0, code_start_);
1352 DCHECK_LT(0, code_end_);
1353 DCHECK_GE(code_end_, code_start_);
1354 return code_start_;
1355 }
1356 int last_instruction_index() const {
1357 DCHECK_LE(0, code_start_);
1358 DCHECK_LT(0, code_end_);
1359 DCHECK_GE(code_end_, code_start_);
1360 return code_end_ - 1;
1361 }
1362
1363 int32_t code_start() const { return code_start_; }
1364 void set_code_start(int32_t start) { code_start_ = start; }
1365
1366 int32_t code_end() const { return code_end_; }
1367 void set_code_end(int32_t end) { code_end_ = end; }
1368
1369 bool IsDeferred() const { return deferred_; }
1370 bool IsHandler() const { return handler_; }
1371
1372 RpoNumber ao_number() const { return ao_number_; }
1373 RpoNumber rpo_number() const { return rpo_number_; }
1374 RpoNumber loop_header() const { return loop_header_; }
1375 RpoNumber loop_end() const {
1376 DCHECK(IsLoopHeader());
1377 return loop_end_;
1378 }
1379 inline bool IsLoopHeader() const { return loop_end_.IsValid(); }
1380 inline bool IsSwitchTarget() const { return switch_target_; }
1381 inline bool ShouldAlign() const { return alignment_; }
1382
1383 using Predecessors = ZoneVector<RpoNumber>;
1384 Predecessors& predecessors() { return predecessors_; }
1385 const Predecessors& predecessors() const { return predecessors_; }
1386 size_t PredecessorCount() const { return predecessors_.size(); }
1387 size_t PredecessorIndexOf(RpoNumber rpo_number) const;
1388
1389 using Successors = ZoneVector<RpoNumber>;
1390 Successors& successors() { return successors_; }
1391 const Successors& successors() const { return successors_; }
1392 size_t SuccessorCount() const { return successors_.size(); }
1393
1394 using PhiInstructions = ZoneVector<PhiInstruction*>;
1395 const PhiInstructions& phis() const { return phis_; }
1396 PhiInstruction* PhiAt(size_t i) const { return phis_[i]; }
1397 void AddPhi(PhiInstruction* phi) { phis_.push_back(phi); }
1398
1399 void set_ao_number(RpoNumber ao_number) { ao_number_ = ao_number; }
1400
1401 void set_alignment(bool val) { alignment_ = val; }
1402
1403 void set_switch_target(bool val) { switch_target_ = val; }
1404
1405 bool needs_frame() const { return needs_frame_; }
1406 void mark_needs_frame() { needs_frame_ = true; }
1407
1408 bool must_construct_frame() const { return must_construct_frame_; }
1409 void mark_must_construct_frame() { must_construct_frame_ = true; }
1410
1411 bool must_deconstruct_frame() const { return must_deconstruct_frame_; }
1412 void mark_must_deconstruct_frame() { must_deconstruct_frame_ = true; }
1413
1414 private:
1415 Successors successors_;
1416 Predecessors predecessors_;
1417 PhiInstructions phis_;
1418 RpoNumber ao_number_; // Assembly order number.
1419 const RpoNumber rpo_number_;
1420 const RpoNumber loop_header_;
1421 const RpoNumber loop_end_;
1422 int32_t code_start_; // start index of arch-specific code.
1423 int32_t code_end_ = -1; // end index of arch-specific code.
1424 const bool deferred_ = -1; // Block contains deferred code.
1425 const bool handler_; // Block is a handler entry point.
1426 bool switch_target_ = false;
1427 bool alignment_ = false; // insert alignment before this block
1428 bool needs_frame_ = false;
1429 bool must_construct_frame_ = false;
1430 bool must_deconstruct_frame_ = false;
1431};
1432
1433class InstructionSequence;
1434
// Non-owning pair bundling a block with the sequence it belongs to, so that
// the operator<< declared below can print it.
struct PrintableInstructionBlock {
  const InstructionBlock* block_;
  const InstructionSequence* code_;
};
1439
1440std::ostream& operator<<(std::ostream&, const PrintableInstructionBlock&);
1441
1442using ConstantDeque = ZoneDeque<Constant>;
1443using ConstantMap = std::map<int, Constant, std::less<int>,
1444 ZoneAllocator<std::pair<const int, Constant> > >;
1445
1446using InstructionDeque = ZoneDeque<Instruction*>;
1447using ReferenceMapDeque = ZoneDeque<ReferenceMap*>;
1448using InstructionBlocks = ZoneVector<InstructionBlock*>;
1449
1450// Represents architecture-specific generated code before, during, and after
1451// register allocation.
class V8_EXPORT_PRIVATE InstructionSequence final
    : public NON_EXPORTED_BASE(ZoneObject) {
 public:
  // Creates the InstructionBlocks corresponding to |schedule|'s basic blocks.
  static InstructionBlocks* InstructionBlocksFor(Zone* zone,
                                                 const Schedule* schedule);
  InstructionSequence(Isolate* isolate, Zone* zone,
                      InstructionBlocks* instruction_blocks);

  // Virtual registers are dense ids handed out sequentially.
  int NextVirtualRegister();
  int VirtualRegisterCount() const { return next_virtual_register_; }

  const InstructionBlocks& instruction_blocks() const {
    return *instruction_blocks_;
  }

  // Blocks in assembly order (see ComputeAssemblyOrder below).
  const InstructionBlocks& ao_blocks() const { return *ao_blocks_; }

  int InstructionBlockCount() const {
    return static_cast<int>(instruction_blocks_->size());
  }

  InstructionBlock* InstructionBlockAt(RpoNumber rpo_number) {
    return instruction_blocks_->at(rpo_number.ToSize());
  }

  // Index of the last instruction in the loop headed by |block|.
  int LastLoopInstructionIndex(const InstructionBlock* block) {
    return instruction_blocks_->at(block->loop_end().ToSize() - 1)
        ->last_instruction_index();
  }

  const InstructionBlock* InstructionBlockAt(RpoNumber rpo_number) const {
    return instruction_blocks_->at(rpo_number.ToSize());
  }

  InstructionBlock* GetInstructionBlock(int instruction_index) const;

  static MachineRepresentation DefaultRepresentation() {
    return MachineType::PointerRepresentation();
  }
  MachineRepresentation GetRepresentation(int virtual_register) const;
  void MarkAsRepresentation(MachineRepresentation rep, int virtual_register);

  bool IsReference(int virtual_register) const {
    return CanBeTaggedOrCompressedPointer(GetRepresentation(virtual_register));
  }
  bool IsFP(int virtual_register) const {
    return IsFloatingPoint(GetRepresentation(virtual_register));
  }
  // Bit set (RepresentationBit) of all representations used in this sequence.
  int representation_mask() const { return representation_mask_; }
  bool HasFPVirtualRegisters() const {
    constexpr int kFPRepMask =
        RepresentationBit(MachineRepresentation::kFloat32) |
        RepresentationBit(MachineRepresentation::kFloat64) |
        RepresentationBit(MachineRepresentation::kSimd128);
    return (representation_mask() & kFPRepMask) != 0;
  }

  Instruction* GetBlockStart(RpoNumber rpo) const;

  using const_iterator = InstructionDeque::const_iterator;
  const_iterator begin() const { return instructions_.begin(); }
  const_iterator end() const { return instructions_.end(); }
  const InstructionDeque& instructions() const { return instructions_; }
  int LastInstructionIndex() const {
    return static_cast<int>(instructions().size()) - 1;
  }

  Instruction* InstructionAt(int index) const {
    DCHECK_LE(0, index);
    DCHECK_GT(instructions_.size(), index);
    return instructions_[index];
  }

  Isolate* isolate() const { return isolate_; }
  const ReferenceMapDeque* reference_maps() const { return &reference_maps_; }
  Zone* zone() const { return zone_; }

  // Used by the instruction selector while adding instructions.
  int AddInstruction(Instruction* instr);
  void StartBlock(RpoNumber rpo);
  void EndBlock(RpoNumber rpo);

  // Registers |constant| under |virtual_register|; each virtual register may
  // be bound to at most one constant.
  int AddConstant(int virtual_register, Constant constant) {
    // TODO(titzer): allow RPO numbers as constants?
    DCHECK_NE(Constant::kRpoNumber, constant.type());
    DCHECK(virtual_register >= 0 && virtual_register < next_virtual_register_);
    DCHECK(constants_.find(virtual_register) == constants_.end());
    constants_.insert(std::make_pair(virtual_register, constant));
    return virtual_register;
  }
  Constant GetConstant(int virtual_register) const {
    ConstantMap::const_iterator it = constants_.find(virtual_register);
    DCHECK(it != constants_.end());
    DCHECK_EQ(virtual_register, it->first);
    return it->second;
  }

  using Immediates = ZoneVector<Constant>;
  Immediates& immediates() { return immediates_; }

  // Relocation-free int32 constants are encoded inline in the operand; all
  // other constants go into the immediates_ table, referenced by index.
  ImmediateOperand AddImmediate(const Constant& constant) {
    if (constant.type() == Constant::kInt32 &&
        RelocInfo::IsNone(constant.rmode())) {
      return ImmediateOperand(ImmediateOperand::INLINE, constant.ToInt32());
    }
    int index = static_cast<int>(immediates_.size());
    immediates_.push_back(constant);
    return ImmediateOperand(ImmediateOperand::INDEXED, index);
  }

  Constant GetImmediate(const ImmediateOperand* op) const {
    switch (op->type()) {
      case ImmediateOperand::INLINE:
        return Constant(op->inline_value());
      case ImmediateOperand::INDEXED: {
        int index = op->indexed_value();
        DCHECK_LE(0, index);
        DCHECK_GT(immediates_.size(), index);
        return immediates_[index];
      }
    }
    UNREACHABLE();
  }

  // Deoptimization entries are referenced by id; see GetDeoptimizationEntry.
  int AddDeoptimizationEntry(FrameStateDescriptor* descriptor,
                             DeoptimizeKind kind, DeoptimizeReason reason,
                             VectorSlotPair const& feedback);
  DeoptimizationEntry const& GetDeoptimizationEntry(int deoptimization_id);
  int GetDeoptimizationEntryCount() const {
    return static_cast<int>(deoptimization_entries_.size());
  }

  RpoNumber InputRpo(Instruction* instr, size_t index);

  // Looks up the source position recorded for |instr|, if any.
  bool GetSourcePosition(const Instruction* instr,
                         SourcePosition* result) const;
  void SetSourcePosition(const Instruction* instr, SourcePosition value);

  bool ContainsCall() const {
    for (Instruction* instr : instructions_) {
      if (instr->IsCall()) return true;
    }
    return false;
  }

  // APIs to aid debugging. For general-stream APIs, use operator<<.
  void Print() const;

  void PrintBlock(int block_id) const;

  // Graph-shape invariants, checked by tests/debug code; defined out of line.
  void ValidateEdgeSplitForm() const;
  void ValidateDeferredBlockExitPaths() const;
  void ValidateDeferredBlockEntryPaths() const;
  void ValidateSSA() const;

  static void SetRegisterConfigurationForTesting(
      const RegisterConfiguration* regConfig);
  static void ClearRegisterConfigurationForTesting();

  void RecomputeAssemblyOrderForTesting();

 private:
  friend V8_EXPORT_PRIVATE std::ostream& operator<<(std::ostream&,
                                                    const InstructionSequence&);

  using SourcePositionMap = ZoneMap<const Instruction*, SourcePosition>;

  static const RegisterConfiguration* RegisterConfigurationForTesting();
  static const RegisterConfiguration* registerConfigurationForTesting_;

  // Puts the deferred blocks last and may rotate loops.
  void ComputeAssemblyOrder();

  Isolate* isolate_;
  Zone* const zone_;
  InstructionBlocks* const instruction_blocks_;  // Indexed by RPO number.
  InstructionBlocks* ao_blocks_;                 // Assembly order.
  SourcePositionMap source_positions_;
  ConstantMap constants_;  // Keyed by virtual register.
  Immediates immediates_;
  InstructionDeque instructions_;
  int next_virtual_register_;
  ReferenceMapDeque reference_maps_;
  ZoneVector<MachineRepresentation> representations_;
  int representation_mask_;
  DeoptimizationVector deoptimization_entries_;

  // Used at construction time
  InstructionBlock* current_block_;

  DISALLOW_COPY_AND_ASSIGN(InstructionSequence);
};
1644
1645V8_EXPORT_PRIVATE std::ostream& operator<<(std::ostream&,
1646 const InstructionSequence&);
1647
1648} // namespace compiler
1649} // namespace internal
1650} // namespace v8
1651
1652#endif // V8_COMPILER_BACKEND_INSTRUCTION_H_
1653