1// Copyright 2014 the V8 project authors. All rights reserved.
2// Use of this source code is governed by a BSD-style license that can be
3// found in the LICENSE file.
4
5#include "src/compiler/backend/instruction.h"
6
7#include <iomanip>
8
9#include "src/compiler/common-operator.h"
10#include "src/compiler/graph.h"
11#include "src/compiler/schedule.h"
12#include "src/compiler/state-values-utils.h"
13#include "src/register-configuration.h"
14#include "src/source-position.h"
15
16namespace v8 {
17namespace internal {
18namespace compiler {
19
// File-wide accessor (as a function pointer) for the register configuration
// used by DCHECKs and FP-aliasing queries below; bound to the default
// build configuration.
const RegisterConfiguration* (*GetRegConfig)() = RegisterConfiguration::Default;
21
22FlagsCondition CommuteFlagsCondition(FlagsCondition condition) {
23 switch (condition) {
24 case kSignedLessThan:
25 return kSignedGreaterThan;
26 case kSignedGreaterThanOrEqual:
27 return kSignedLessThanOrEqual;
28 case kSignedLessThanOrEqual:
29 return kSignedGreaterThanOrEqual;
30 case kSignedGreaterThan:
31 return kSignedLessThan;
32 case kUnsignedLessThan:
33 return kUnsignedGreaterThan;
34 case kUnsignedGreaterThanOrEqual:
35 return kUnsignedLessThanOrEqual;
36 case kUnsignedLessThanOrEqual:
37 return kUnsignedGreaterThanOrEqual;
38 case kUnsignedGreaterThan:
39 return kUnsignedLessThan;
40 case kFloatLessThanOrUnordered:
41 return kFloatGreaterThanOrUnordered;
42 case kFloatGreaterThanOrEqual:
43 return kFloatLessThanOrEqual;
44 case kFloatLessThanOrEqual:
45 return kFloatGreaterThanOrEqual;
46 case kFloatGreaterThanOrUnordered:
47 return kFloatLessThanOrUnordered;
48 case kFloatLessThan:
49 return kFloatGreaterThan;
50 case kFloatGreaterThanOrEqualOrUnordered:
51 return kFloatLessThanOrEqualOrUnordered;
52 case kFloatLessThanOrEqualOrUnordered:
53 return kFloatGreaterThanOrEqualOrUnordered;
54 case kFloatGreaterThan:
55 return kFloatLessThan;
56 case kPositiveOrZero:
57 case kNegative:
58 UNREACHABLE();
59 break;
60 case kEqual:
61 case kNotEqual:
62 case kOverflow:
63 case kNotOverflow:
64 case kUnorderedEqual:
65 case kUnorderedNotEqual:
66 return condition;
67 }
68 UNREACHABLE();
69}
70
71bool InstructionOperand::InterferesWith(const InstructionOperand& other) const {
72 if (kSimpleFPAliasing || !this->IsFPLocationOperand() ||
73 !other.IsFPLocationOperand())
74 return EqualsCanonicalized(other);
75 // Aliasing is complex and both operands are fp locations.
76 const LocationOperand& loc = *LocationOperand::cast(this);
77 const LocationOperand& other_loc = LocationOperand::cast(other);
78 LocationOperand::LocationKind kind = loc.location_kind();
79 LocationOperand::LocationKind other_kind = other_loc.location_kind();
80 if (kind != other_kind) return false;
81 MachineRepresentation rep = loc.representation();
82 MachineRepresentation other_rep = other_loc.representation();
83 if (rep == other_rep) return EqualsCanonicalized(other);
84 if (kind == LocationOperand::REGISTER) {
85 // FP register-register interference.
86 return GetRegConfig()->AreAliases(rep, loc.register_code(), other_rep,
87 other_loc.register_code());
88 } else {
89 // FP slot-slot interference. Slots of different FP reps can alias because
90 // the gap resolver may break a move into 2 or 4 equivalent smaller moves.
91 DCHECK_EQ(LocationOperand::STACK_SLOT, kind);
92 int index_hi = loc.index();
93 int index_lo =
94 index_hi - (1 << ElementSizeLog2Of(rep)) / kSystemPointerSize + 1;
95 int other_index_hi = other_loc.index();
96 int other_index_lo =
97 other_index_hi -
98 (1 << ElementSizeLog2Of(other_rep)) / kSystemPointerSize + 1;
99 return other_index_hi >= index_lo && index_hi >= other_index_lo;
100 }
101 return false;
102}
103
104bool LocationOperand::IsCompatible(LocationOperand* op) {
105 if (IsRegister() || IsStackSlot()) {
106 return op->IsRegister() || op->IsStackSlot();
107 } else if (kSimpleFPAliasing) {
108 // A backend may choose to generate the same instruction sequence regardless
109 // of the FP representation. As a result, we can relax the compatibility and
110 // allow a Double to be moved in a Float for example. However, this is only
111 // allowed if registers do not overlap.
112 return (IsFPRegister() || IsFPStackSlot()) &&
113 (op->IsFPRegister() || op->IsFPStackSlot());
114 } else if (IsFloatRegister() || IsFloatStackSlot()) {
115 return op->IsFloatRegister() || op->IsFloatStackSlot();
116 } else if (IsDoubleRegister() || IsDoubleStackSlot()) {
117 return op->IsDoubleRegister() || op->IsDoubleStackSlot();
118 } else {
119 return (IsSimd128Register() || IsSimd128StackSlot()) &&
120 (op->IsSimd128Register() || op->IsSimd128StackSlot());
121 }
122}
123
124void InstructionOperand::Print() const { StdoutStream{} << *this << std::endl; }
125
// Pretty-prints an operand. Unallocated operands print as "vN" plus a policy
// suffix; constants/immediates print their payload; allocated/explicit
// operands print location, kind and machine representation, e.g.
// "[rax|R|w64]". Each inner switch returns from every case, so control never
// falls through to the next outer case label.
std::ostream& operator<<(std::ostream& os, const InstructionOperand& op) {
  switch (op.kind()) {
    case InstructionOperand::UNALLOCATED: {
      const UnallocatedOperand* unalloc = UnallocatedOperand::cast(&op);
      os << "v" << unalloc->virtual_register();
      // A fixed-slot policy supersedes any extended policy.
      if (unalloc->basic_policy() == UnallocatedOperand::FIXED_SLOT) {
        return os << "(=" << unalloc->fixed_slot_index() << "S)";
      }
      switch (unalloc->extended_policy()) {
        case UnallocatedOperand::NONE:
          return os;
        case UnallocatedOperand::FIXED_REGISTER:
          return os << "(="
                    << Register::from_code(unalloc->fixed_register_index())
                    << ")";
        case UnallocatedOperand::FIXED_FP_REGISTER:
          return os << "(="
                    << DoubleRegister::from_code(
                           unalloc->fixed_register_index())
                    << ")";
        case UnallocatedOperand::MUST_HAVE_REGISTER:
          return os << "(R)";
        case UnallocatedOperand::MUST_HAVE_SLOT:
          return os << "(S)";
        case UnallocatedOperand::SAME_AS_FIRST_INPUT:
          return os << "(1)";
        case UnallocatedOperand::REGISTER_OR_SLOT:
          return os << "(-)";
        case UnallocatedOperand::REGISTER_OR_SLOT_OR_CONSTANT:
          return os << "(*)";
      }
    }
    case InstructionOperand::CONSTANT:
      return os << "[constant:" << ConstantOperand::cast(op).virtual_register()
                << "]";
    case InstructionOperand::IMMEDIATE: {
      ImmediateOperand imm = ImmediateOperand::cast(op);
      switch (imm.type()) {
        case ImmediateOperand::INLINE:
          return os << "#" << imm.inline_value();
        case ImmediateOperand::INDEXED:
          return os << "[immediate:" << imm.indexed_value() << "]";
      }
    }
    case InstructionOperand::EXPLICIT:
    case InstructionOperand::ALLOCATED: {
      LocationOperand allocated = LocationOperand::cast(op);
      // Location: a stack slot index or a register name.
      if (op.IsStackSlot()) {
        os << "[stack:" << allocated.index();
      } else if (op.IsFPStackSlot()) {
        os << "[fp_stack:" << allocated.index();
      } else if (op.IsRegister()) {
        // Register codes beyond kNumRegisters denote special registers.
        const char* name =
            allocated.register_code() < Register::kNumRegisters
                ? RegisterName(Register::from_code(allocated.register_code()))
                : Register::GetSpecialRegisterName(allocated.register_code());
        os << "[" << name << "|R";
      } else if (op.IsDoubleRegister()) {
        os << "[" << DoubleRegister::from_code(allocated.register_code())
           << "|R";
      } else if (op.IsFloatRegister()) {
        os << "[" << FloatRegister::from_code(allocated.register_code())
           << "|R";
      } else {
        DCHECK(op.IsSimd128Register());
        os << "[" << Simd128Register::from_code(allocated.register_code())
           << "|R";
      }
      if (allocated.IsExplicit()) {
        os << "|E";
      }
      // Append a short tag for the machine representation.
      switch (allocated.representation()) {
        case MachineRepresentation::kNone:
          os << "|-";
          break;
        case MachineRepresentation::kBit:
          os << "|b";
          break;
        case MachineRepresentation::kWord8:
          os << "|w8";
          break;
        case MachineRepresentation::kWord16:
          os << "|w16";
          break;
        case MachineRepresentation::kWord32:
          os << "|w32";
          break;
        case MachineRepresentation::kWord64:
          os << "|w64";
          break;
        case MachineRepresentation::kFloat32:
          os << "|f32";
          break;
        case MachineRepresentation::kFloat64:
          os << "|f64";
          break;
        case MachineRepresentation::kSimd128:
          os << "|s128";
          break;
        case MachineRepresentation::kTaggedSigned:
          os << "|ts";
          break;
        case MachineRepresentation::kTaggedPointer:
          os << "|tp";
          break;
        case MachineRepresentation::kTagged:
          os << "|t";
          break;
        case MachineRepresentation::kCompressedSigned:
          os << "|cs";
          break;
        case MachineRepresentation::kCompressedPointer:
          os << "|cp";
          break;
        case MachineRepresentation::kCompressed:
          os << "|c";
          break;
      }
      return os << "]";
    }
    case InstructionOperand::INVALID:
      return os << "(x)";
  }
  UNREACHABLE();
}
251
252void MoveOperands::Print() const {
253 StdoutStream{} << destination() << " = " << source() << std::endl;
254}
255
256std::ostream& operator<<(std::ostream& os, const MoveOperands& mo) {
257 os << mo.destination();
258 if (!mo.source().Equals(mo.destination())) {
259 os << " = " << mo.source();
260 }
261 return os << ";";
262}
263
264bool ParallelMove::IsRedundant() const {
265 for (MoveOperands* move : *this) {
266 if (!move->IsRedundant()) return false;
267 }
268 return true;
269}
270
// Prepares |move| for insertion after this parallel move: rewires its source
// through any move here that defines it, and collects into |to_eliminate|
// moves whose destinations are overwritten by |move|. With FP aliasing,
// several moves may interfere with |move|'s destination, so the scan cannot
// stop after the first elimination.
void ParallelMove::PrepareInsertAfter(
    MoveOperands* move, ZoneVector<MoveOperands*>* to_eliminate) const {
  // Early-exit below is only sound when at most one move can interfere.
  bool no_aliasing =
      kSimpleFPAliasing || !move->destination().IsFPLocationOperand();
  MoveOperands* replacement = nullptr;
  MoveOperands* eliminated = nullptr;
  for (MoveOperands* curr : *this) {
    if (curr->IsEliminated()) continue;
    if (curr->destination().EqualsCanonicalized(move->source())) {
      // We must replace move's source with curr's destination in order to
      // insert it into this ParallelMove.
      DCHECK(!replacement);
      replacement = curr;
      if (no_aliasing && eliminated != nullptr) break;
    } else if (curr->destination().InterferesWith(move->destination())) {
      // We can eliminate curr, since move overwrites at least a part of its
      // destination, implying its value is no longer live.
      eliminated = curr;
      to_eliminate->push_back(curr);
      if (no_aliasing && replacement != nullptr) break;
    }
  }
  if (replacement != nullptr) move->set_source(replacement->source());
}
295
// Constructs an explicit (pre-assigned, non-allocator-managed) location.
// Debug builds verify that an explicit register is actually allocatable in
// the active register configuration for its representation.
ExplicitOperand::ExplicitOperand(LocationKind kind, MachineRepresentation rep,
                                 int index)
    : LocationOperand(EXPLICIT, kind, rep, index) {
  DCHECK_IMPLIES(kind == REGISTER && !IsFloatingPoint(rep),
                 GetRegConfig()->IsAllocatableGeneralCode(index));
  DCHECK_IMPLIES(kind == REGISTER && rep == MachineRepresentation::kFloat32,
                 GetRegConfig()->IsAllocatableFloatCode(index));
  DCHECK_IMPLIES(kind == REGISTER && (rep == MachineRepresentation::kFloat64),
                 GetRegConfig()->IsAllocatableDoubleCode(index));
}
306
307Instruction::Instruction(InstructionCode opcode)
308 : opcode_(opcode),
309 bit_field_(OutputCountField::encode(0) | InputCountField::encode(0) |
310 TempCountField::encode(0) | IsCallField::encode(false)),
311 reference_map_(nullptr),
312 block_(nullptr) {
313 parallel_moves_[0] = nullptr;
314 parallel_moves_[1] = nullptr;
315}
316
// Constructs an instruction and packs its operands into the trailing
// operands_ storage in the fixed order: outputs, then inputs, then temps.
// The per-kind counts are encoded into bit_field_ so accessors can recover
// each section's offset.
Instruction::Instruction(InstructionCode opcode, size_t output_count,
                         InstructionOperand* outputs, size_t input_count,
                         InstructionOperand* inputs, size_t temp_count,
                         InstructionOperand* temps)
    : opcode_(opcode),
      bit_field_(OutputCountField::encode(output_count) |
                 InputCountField::encode(input_count) |
                 TempCountField::encode(temp_count) |
                 IsCallField::encode(false)),
      reference_map_(nullptr),
      block_(nullptr) {
  // No gap moves yet; they are created lazily via GetOrCreateParallelMove.
  // NOTE(review): assumes two gap positions (FIRST/LAST_GAP_POSITION) — the
  // array is declared elsewhere; confirm against the header.
  parallel_moves_[0] = nullptr;
  parallel_moves_[1] = nullptr;
  size_t offset = 0;
  for (size_t i = 0; i < output_count; ++i) {
    DCHECK(!outputs[i].IsInvalid());
    operands_[offset++] = outputs[i];
  }
  for (size_t i = 0; i < input_count; ++i) {
    DCHECK(!inputs[i].IsInvalid());
    operands_[offset++] = inputs[i];
  }
  for (size_t i = 0; i < temp_count; ++i) {
    DCHECK(!temps[i].IsInvalid());
    operands_[offset++] = temps[i];
  }
}
344
345bool Instruction::AreMovesRedundant() const {
346 for (int i = Instruction::FIRST_GAP_POSITION;
347 i <= Instruction::LAST_GAP_POSITION; i++) {
348 if (parallel_moves_[i] != nullptr && !parallel_moves_[i]->IsRedundant()) {
349 return false;
350 }
351 }
352 return true;
353}
354
355void Instruction::Print() const { StdoutStream{} << *this << std::endl; }
356
357std::ostream& operator<<(std::ostream& os, const ParallelMove& pm) {
358 const char* space = "";
359 for (MoveOperands* move : pm) {
360 if (move->IsEliminated()) continue;
361 os << space << *move;
362 space = " ";
363 }
364 return os;
365}
366
367void ReferenceMap::RecordReference(const AllocatedOperand& op) {
368 // Do not record arguments as pointers.
369 if (op.IsStackSlot() && LocationOperand::cast(op).index() < 0) return;
370 DCHECK(!op.IsFPRegister() && !op.IsFPStackSlot());
371 reference_operands_.push_back(op);
372}
373
374std::ostream& operator<<(std::ostream& os, const ReferenceMap& pm) {
375 os << "{";
376 const char* separator = "";
377 for (const InstructionOperand& op : pm.reference_operands_) {
378 os << separator << op;
379 separator = ";";
380 }
381 return os << "}";
382}
383
// Prints the opcode's mnemonic; the case list is generated from the
// target-independent ARCH_OPCODE_LIST macro.
std::ostream& operator<<(std::ostream& os, const ArchOpcode& ao) {
  switch (ao) {
#define CASE(Name) \
  case k##Name:    \
    return os << #Name;
    ARCH_OPCODE_LIST(CASE)
#undef CASE
  }
  UNREACHABLE();
}
394
// Prints the addressing-mode name; kMode_None prints nothing. The case list
// comes from the target-specific TARGET_ADDRESSING_MODE_LIST macro.
std::ostream& operator<<(std::ostream& os, const AddressingMode& am) {
  switch (am) {
    case kMode_None:
      return os;
#define CASE(Name)       \
  case kMode_##Name:     \
    return os << #Name;
    TARGET_ADDRESSING_MODE_LIST(CASE)
#undef CASE
  }
  UNREACHABLE();
}
407
// Prints the flags mode as a short lowercase tag; kFlags_none prints
// nothing so unflagged instructions stay compact.
std::ostream& operator<<(std::ostream& os, const FlagsMode& fm) {
  switch (fm) {
    case kFlags_none:
      return os;
    case kFlags_branch:
      return os << "branch";
    case kFlags_branch_and_poison:
      return os << "branch_and_poison";
    case kFlags_deoptimize:
      return os << "deoptimize";
    case kFlags_deoptimize_and_poison:
      return os << "deoptimize_and_poison";
    case kFlags_set:
      return os << "set";
    case kFlags_trap:
      return os << "trap";
  }
  UNREACHABLE();
}
427
// Prints a human-readable description of the condition; FP conditions are
// suffixed with "(FP)" to distinguish them from their integer counterparts.
std::ostream& operator<<(std::ostream& os, const FlagsCondition& fc) {
  switch (fc) {
    case kEqual:
      return os << "equal";
    case kNotEqual:
      return os << "not equal";
    case kSignedLessThan:
      return os << "signed less than";
    case kSignedGreaterThanOrEqual:
      return os << "signed greater than or equal";
    case kSignedLessThanOrEqual:
      return os << "signed less than or equal";
    case kSignedGreaterThan:
      return os << "signed greater than";
    case kUnsignedLessThan:
      return os << "unsigned less than";
    case kUnsignedGreaterThanOrEqual:
      return os << "unsigned greater than or equal";
    case kUnsignedLessThanOrEqual:
      return os << "unsigned less than or equal";
    case kUnsignedGreaterThan:
      return os << "unsigned greater than";
    case kFloatLessThanOrUnordered:
      return os << "less than or unordered (FP)";
    case kFloatGreaterThanOrEqual:
      return os << "greater than or equal (FP)";
    case kFloatLessThanOrEqual:
      return os << "less than or equal (FP)";
    case kFloatGreaterThanOrUnordered:
      return os << "greater than or unordered (FP)";
    case kFloatLessThan:
      return os << "less than (FP)";
    case kFloatGreaterThanOrEqualOrUnordered:
      return os << "greater than, equal or unordered (FP)";
    case kFloatLessThanOrEqualOrUnordered:
      return os << "less than, equal or unordered (FP)";
    case kFloatGreaterThan:
      return os << "greater than (FP)";
    case kUnorderedEqual:
      return os << "unordered equal";
    case kUnorderedNotEqual:
      return os << "unordered not equal";
    case kOverflow:
      return os << "overflow";
    case kNotOverflow:
      return os << "not overflow";
    case kPositiveOrZero:
      return os << "positive or zero";
    case kNegative:
      return os << "negative";
  }
  UNREACHABLE();
}
481
482std::ostream& operator<<(std::ostream& os, const Instruction& instr) {
483 os << "gap ";
484 for (int i = Instruction::FIRST_GAP_POSITION;
485 i <= Instruction::LAST_GAP_POSITION; i++) {
486 os << "(";
487 if (instr.parallel_moves()[i] != nullptr) {
488 os << *instr.parallel_moves()[i];
489 }
490 os << ") ";
491 }
492 os << "\n ";
493
494 if (instr.OutputCount() == 1) {
495 os << *instr.OutputAt(0) << " = ";
496 } else if (instr.OutputCount() > 1) {
497 os << "(" << *instr.OutputAt(0);
498 for (size_t i = 1; i < instr.OutputCount(); i++) {
499 os << ", " << *instr.OutputAt(i);
500 }
501 os << ") = ";
502 }
503
504 os << ArchOpcodeField::decode(instr.opcode());
505 AddressingMode am = AddressingModeField::decode(instr.opcode());
506 if (am != kMode_None) {
507 os << " : " << AddressingModeField::decode(instr.opcode());
508 }
509 FlagsMode fm = FlagsModeField::decode(instr.opcode());
510 if (fm != kFlags_none) {
511 os << " && " << fm << " if " << FlagsConditionField::decode(instr.opcode());
512 }
513 for (size_t i = 0; i < instr.InputCount(); i++) {
514 os << " " << *instr.InputAt(i);
515 }
516 return os;
517}
518
// Wraps a plain 32-bit integer constant.
Constant::Constant(int32_t v) : type_(kInt32), value_(v) {}
520
521Constant::Constant(RelocatablePtrConstantInfo info) {
522 if (info.type() == RelocatablePtrConstantInfo::kInt32) {
523 type_ = kInt32;
524 } else if (info.type() == RelocatablePtrConstantInfo::kInt64) {
525 type_ = kInt64;
526 } else {
527 UNREACHABLE();
528 }
529 value_ = info.value();
530 rmode_ = info.rmode();
531}
532
// Reinterprets the stored integer payload as a handle location and wraps it
// in a Handle<HeapObject>. Only valid for kHeapObject constants.
Handle<HeapObject> Constant::ToHeapObject() const {
  DCHECK_EQ(kHeapObject, type());
  Handle<HeapObject> value(
      reinterpret_cast<Address*>(static_cast<intptr_t>(value_)));
  return value;
}
539
// Like ToHeapObject, but typed as Code. Note the check is against
// kHeapObject: code constants are stored with the heap-object tag.
Handle<Code> Constant::ToCode() const {
  DCHECK_EQ(kHeapObject, type());
  Handle<Code> value(reinterpret_cast<Address*>(static_cast<intptr_t>(value_)));
  return value;
}
545
546const StringConstantBase* Constant::ToDelayedStringConstant() const {
547 DCHECK_EQ(kDelayedStringConstant, type());
548 const StringConstantBase* value =
549 bit_cast<StringConstantBase*>(static_cast<intptr_t>(value_));
550 return value;
551}
552
// Prints a constant with a type-specific suffix or prefix: "l" for int64,
// "f" for float32, "RPO" for RPO numbers, etc.
std::ostream& operator<<(std::ostream& os, const Constant& constant) {
  switch (constant.type()) {
    case Constant::kInt32:
      return os << constant.ToInt32();
    case Constant::kInt64:
      return os << constant.ToInt64() << "l";
    case Constant::kFloat32:
      return os << constant.ToFloat32() << "f";
    case Constant::kFloat64:
      return os << constant.ToFloat64().value();
    case Constant::kExternalReference:
      return os << constant.ToExternalReference().address();
    case Constant::kHeapObject:
      return os << Brief(*constant.ToHeapObject());
    case Constant::kRpoNumber:
      return os << "RPO" << constant.ToRpoNumber().ToInt();
    case Constant::kDelayedStringConstant:
      return os << "DelayedStringConstant: "
                << constant.ToDelayedStringConstant();
  }
  UNREACHABLE();
}
575
// Constructs a phi for |virtual_register| with |input_count| inputs, all
// initially unset (kInvalidVirtualRegister) until SetInput fills them in.
PhiInstruction::PhiInstruction(Zone* zone, int virtual_register,
                               size_t input_count)
    : virtual_register_(virtual_register),
      output_(UnallocatedOperand(UnallocatedOperand::NONE, virtual_register)),
      operands_(input_count, InstructionOperand::kInvalidVirtualRegister,
                zone) {}
582
// Sets the phi input at |offset| for the first time; the slot must still be
// unset (contrast RenameInput, which overwrites an existing input).
void PhiInstruction::SetInput(size_t offset, int virtual_register) {
  DCHECK_EQ(InstructionOperand::kInvalidVirtualRegister, operands_[offset]);
  operands_[offset] = virtual_register;
}
587
// Replaces an already-set phi input at |offset| with a new virtual register.
void PhiInstruction::RenameInput(size_t offset, int virtual_register) {
  DCHECK_NE(InstructionOperand::kInvalidVirtualRegister, operands_[offset]);
  operands_[offset] = virtual_register;
}
592
// Constructs an instruction block keyed by its reverse-post-order number.
// The assembly-order number starts out invalid and is assigned later by
// InstructionSequence::ComputeAssemblyOrder.
InstructionBlock::InstructionBlock(Zone* zone, RpoNumber rpo_number,
                                   RpoNumber loop_header, RpoNumber loop_end,
                                   bool deferred, bool handler)
    : successors_(zone),
      predecessors_(zone),
      phis_(zone),
      ao_number_(RpoNumber::Invalid()),
      rpo_number_(rpo_number),
      loop_header_(loop_header),
      loop_end_(loop_end),
      deferred_(deferred),
      handler_(handler) {}
605
606size_t InstructionBlock::PredecessorIndexOf(RpoNumber rpo_number) const {
607 size_t j = 0;
608 for (InstructionBlock::Predecessors::const_iterator i = predecessors_.begin();
609 i != predecessors_.end(); ++i, ++j) {
610 if (*i == rpo_number) break;
611 }
612 return j;
613}
614
615static RpoNumber GetRpo(const BasicBlock* block) {
616 if (block == nullptr) return RpoNumber::Invalid();
617 return RpoNumber::FromInt(block->rpo_number());
618}
619
620static RpoNumber GetLoopEndRpo(const BasicBlock* block) {
621 if (!block->IsLoopHeader()) return RpoNumber::Invalid();
622 return RpoNumber::FromInt(block->loop_end()->rpo_number());
623}
624
// Creates the InstructionBlock mirroring a scheduled BasicBlock: copies RPO
// numbering, loop structure, deferred-ness, successor/predecessor links, and
// marks exception-handler entries and switch targets.
static InstructionBlock* InstructionBlockFor(Zone* zone,
                                             const BasicBlock* block) {
  // A block beginning with IfException is an exception-handler entry.
  bool is_handler =
      !block->empty() && block->front()->opcode() == IrOpcode::kIfException;
  InstructionBlock* instr_block = new (zone)
      InstructionBlock(zone, GetRpo(block), GetRpo(block->loop_header()),
                       GetLoopEndRpo(block), block->deferred(), is_handler);
  // Map successors and predecessors.
  instr_block->successors().reserve(block->SuccessorCount());
  for (BasicBlock* successor : block->successors()) {
    instr_block->successors().push_back(GetRpo(successor));
  }
  instr_block->predecessors().reserve(block->PredecessorCount());
  for (BasicBlock* predecessor : block->predecessors()) {
    instr_block->predecessors().push_back(GetRpo(predecessor));
  }
  // A block whose single predecessor ends in a switch is a switch target;
  // ComputeAssemblyOrder uses this to decide on alignment.
  if (block->PredecessorCount() == 1 &&
      block->predecessors()[0]->control() == BasicBlock::Control::kSwitch) {
    instr_block->set_switch_target(true);
  }
  return instr_block;
}
647
// Prints a block header (RPO/AO numbers, frame flags, loop extent), its
// predecessors, phis, the numbered instructions in its index range, and its
// successors.
std::ostream& operator<<(std::ostream& os,
                         const PrintableInstructionBlock& printable_block) {
  const InstructionBlock* block = printable_block.block_;
  const InstructionSequence* code = printable_block.code_;

  os << "B" << block->rpo_number();
  // Assembly order may not have been computed yet.
  if (block->ao_number().IsValid()) {
    os << ": AO#" << block->ao_number();
  } else {
    os << ": AO#?";
  }
  if (block->IsDeferred()) os << " (deferred)";
  if (!block->needs_frame()) os << " (no frame)";
  if (block->must_construct_frame()) os << " (construct frame)";
  if (block->must_deconstruct_frame()) os << " (deconstruct frame)";
  if (block->IsLoopHeader()) {
    // Loop extent as a half-open RPO range.
    os << " loop blocks: [" << block->rpo_number() << ", " << block->loop_end()
       << ")";
  }
  os << " instructions: [" << block->code_start() << ", " << block->code_end()
     << ")" << std::endl
     << " predecessors:";

  for (RpoNumber pred : block->predecessors()) {
    os << " B" << pred.ToInt();
  }
  os << std::endl;

  // Phi inputs print as virtual registers, one per predecessor.
  for (const PhiInstruction* phi : block->phis()) {
    os << "     phi: " << phi->output() << " =";
    for (int input : phi->operands()) {
      os << " v" << input;
    }
    os << std::endl;
  }

  for (int j = block->first_instruction_index();
       j <= block->last_instruction_index(); j++) {
    os << "   " << std::setw(5) << j << ": " << *code->InstructionAt(j)
       << std::endl;
  }

  os << " successors:";
  for (RpoNumber succ : block->successors()) {
    os << " B" << succ.ToInt();
  }
  os << std::endl;
  return os;
}
697
// Builds the zone-allocated InstructionBlocks vector mirroring the
// schedule's RPO order, one InstructionBlock per BasicBlock.
InstructionBlocks* InstructionSequence::InstructionBlocksFor(
    Zone* zone, const Schedule* schedule) {
  // Zone memory is raw, so construct the container with placement new.
  InstructionBlocks* blocks = zone->NewArray<InstructionBlocks>(1);
  new (blocks) InstructionBlocks(
      static_cast<int>(schedule->rpo_order()->size()), nullptr, zone);
  size_t rpo_number = 0;
  for (BasicBlockVector::const_iterator it = schedule->rpo_order()->begin();
       it != schedule->rpo_order()->end(); ++it, ++rpo_number) {
    DCHECK(!(*blocks)[rpo_number]);
    // The schedule's RPO order must agree with each block's own RPO number.
    DCHECK(GetRpo(*it).ToSize() == rpo_number);
    (*blocks)[rpo_number] = InstructionBlockFor(zone, *it);
  }
  return blocks;
}
712
713void InstructionSequence::ValidateEdgeSplitForm() const {
714 // Validate blocks are in edge-split form: no block with multiple successors
715 // has an edge to a block (== a successor) with more than one predecessors.
716 for (const InstructionBlock* block : instruction_blocks()) {
717 if (block->SuccessorCount() > 1) {
718 for (const RpoNumber& successor_id : block->successors()) {
719 const InstructionBlock* successor = InstructionBlockAt(successor_id);
720 // Expect precisely one predecessor: "block".
721 CHECK(successor->PredecessorCount() == 1 &&
722 successor->predecessors()[0] == block->rpo_number());
723 }
724 }
725 }
726}
727
728void InstructionSequence::ValidateDeferredBlockExitPaths() const {
729 // A deferred block with more than one successor must have all its successors
730 // deferred.
731 for (const InstructionBlock* block : instruction_blocks()) {
732 if (!block->IsDeferred() || block->SuccessorCount() <= 1) continue;
733 for (RpoNumber successor_id : block->successors()) {
734 CHECK(InstructionBlockAt(successor_id)->IsDeferred());
735 }
736 }
737}
738
739void InstructionSequence::ValidateDeferredBlockEntryPaths() const {
740 // If a deferred block has multiple predecessors, they have to
741 // all be deferred. Otherwise, we can run into a situation where a range
742 // that spills only in deferred blocks inserts its spill in the block, but
743 // other ranges need moves inserted by ResolveControlFlow in the predecessors,
744 // which may clobber the register of this range.
745 for (const InstructionBlock* block : instruction_blocks()) {
746 if (!block->IsDeferred() || block->PredecessorCount() <= 1) continue;
747 for (RpoNumber predecessor_id : block->predecessors()) {
748 CHECK(InstructionBlockAt(predecessor_id)->IsDeferred());
749 }
750 }
751}
752
753void InstructionSequence::ValidateSSA() const {
754 // TODO(mtrofin): We could use a local zone here instead.
755 BitVector definitions(VirtualRegisterCount(), zone());
756 for (const Instruction* instruction : *this) {
757 for (size_t i = 0; i < instruction->OutputCount(); ++i) {
758 const InstructionOperand* output = instruction->OutputAt(i);
759 int vreg = (output->IsConstant())
760 ? ConstantOperand::cast(output)->virtual_register()
761 : UnallocatedOperand::cast(output)->virtual_register();
762 CHECK(!definitions.Contains(vreg));
763 definitions.Add(vreg);
764 }
765 }
766}
767
// Assigns assembly-order (AO) numbers and builds ao_blocks_: non-deferred
// blocks first (with loop rotation and alignment decisions), deferred blocks
// appended at the end.
void InstructionSequence::ComputeAssemblyOrder() {
  int ao = 0;
  RpoNumber invalid = RpoNumber::Invalid();

  // ao_blocks_ lives in zone memory, hence placement new.
  ao_blocks_ = zone()->NewArray<InstructionBlocks>(1);
  new (ao_blocks_) InstructionBlocks(zone());
  ao_blocks_->reserve(instruction_blocks_->size());

  // Place non-deferred blocks.
  for (InstructionBlock* const block : *instruction_blocks_) {
    DCHECK_NOT_NULL(block);
    if (block->IsDeferred()) continue;            // skip deferred blocks.
    if (block->ao_number() != invalid) continue;  // loop rotated.
    if (block->IsLoopHeader()) {
      bool header_align = true;
      if (FLAG_turbo_loop_rotation) {
        // Perform loop rotation for non-deferred loops.
        // loop_end() is one past the last block of the loop, so subtract 1.
        InstructionBlock* loop_end =
            instruction_blocks_->at(block->loop_end().ToSize() - 1);
        if (loop_end->SuccessorCount() == 1 && /* ends with goto */
            loop_end != block /* not a degenerate infinite loop */) {
          // If the last block has an unconditional jump back to the header,
          // then move it to be in front of the header in the assembly order.
          DCHECK_EQ(block->rpo_number(), loop_end->successors()[0]);
          loop_end->set_ao_number(RpoNumber::FromInt(ao++));
          ao_blocks_->push_back(loop_end);
          // This block will be the new machine-level loop header, so align
          // this block instead of the loop header block.
          loop_end->set_alignment(true);
          header_align = false;
        }
      }
      block->set_alignment(header_align);
    }
    // Align switch targets inside loops for better jump-table performance.
    if (block->loop_header().IsValid() && block->IsSwitchTarget()) {
      block->set_alignment(true);
    }
    block->set_ao_number(RpoNumber::FromInt(ao++));
    ao_blocks_->push_back(block);
  }
  // Add all leftover (deferred) blocks.
  for (InstructionBlock* const block : *instruction_blocks_) {
    if (block->ao_number() == invalid) {
      block->set_ao_number(RpoNumber::FromInt(ao++));
      ao_blocks_->push_back(block);
    }
  }
  // Every block must have received exactly one AO number.
  DCHECK_EQ(instruction_blocks_->size(), ao);
}
817
818void InstructionSequence::RecomputeAssemblyOrderForTesting() {
819 RpoNumber invalid = RpoNumber::Invalid();
820 for (InstructionBlock* block : *instruction_blocks_) {
821 block->set_ao_number(invalid);
822 }
823 ComputeAssemblyOrder();
824}
825
// Constructs an instruction sequence over pre-built blocks; all containers
// are allocated from |instruction_zone|. Assembly order is computed eagerly.
InstructionSequence::InstructionSequence(Isolate* isolate,
                                         Zone* instruction_zone,
                                         InstructionBlocks* instruction_blocks)
    : isolate_(isolate),
      zone_(instruction_zone),
      instruction_blocks_(instruction_blocks),
      ao_blocks_(nullptr),
      source_positions_(zone()),
      constants_(ConstantMap::key_compare(),
                 ConstantMap::allocator_type(zone())),
      immediates_(zone()),
      instructions_(zone()),
      next_virtual_register_(0),
      reference_maps_(zone()),
      representations_(zone()),
      representation_mask_(0),
      deoptimization_entries_(zone()),
      current_block_(nullptr) {
  ComputeAssemblyOrder();
}
846
847int InstructionSequence::NextVirtualRegister() {
848 int virtual_register = next_virtual_register_++;
849 CHECK_NE(virtual_register, InstructionOperand::kInvalidVirtualRegister);
850 return virtual_register;
851}
852
853Instruction* InstructionSequence::GetBlockStart(RpoNumber rpo) const {
854 const InstructionBlock* block = InstructionBlockAt(rpo);
855 return InstructionAt(block->code_start());
856}
857
858void InstructionSequence::StartBlock(RpoNumber rpo) {
859 DCHECK_NULL(current_block_);
860 current_block_ = InstructionBlockAt(rpo);
861 int code_start = static_cast<int>(instructions_.size());
862 current_block_->set_code_start(code_start);
863}
864
// Closes the currently open block, recording its end index. |rpo| is only
// consulted by the DCHECK; the strict code_start() < end check means every
// block must have emitted at least one instruction.
void InstructionSequence::EndBlock(RpoNumber rpo) {
  int end = static_cast<int>(instructions_.size());
  DCHECK_EQ(current_block_->rpo_number(), rpo);
  CHECK(current_block_->code_start() >= 0 &&
        current_block_->code_start() < end);
  current_block_->set_code_end(end);
  current_block_ = nullptr;
}
873
// Appends |instr| to the currently open block and returns its index in the
// instruction stream. Instructions that need a reference map (GC point) get
// a fresh one allocated and registered here.
int InstructionSequence::AddInstruction(Instruction* instr) {
  DCHECK_NOT_NULL(current_block_);
  int index = static_cast<int>(instructions_.size());
  instr->set_block(current_block_);
  instructions_.push_back(instr);
  if (instr->NeedsReferenceMap()) {
    DCHECK_NULL(instr->reference_map());
    ReferenceMap* reference_map = new (zone()) ReferenceMap(zone());
    reference_map->set_instruction_position(index);
    instr->set_reference_map(reference_map);
    reference_maps_.push_back(reference_map);
  }
  return index;
}
888
889InstructionBlock* InstructionSequence::GetInstructionBlock(
890 int instruction_index) const {
891 return instructions()[instruction_index]->block();
892}
893
// Normalizes a representation for register-allocation purposes: sub-word
// integer representations (bit, word8, word16) are widened to the default
// word-sized representation; everything else passes through unchanged.
static MachineRepresentation FilterRepresentation(MachineRepresentation rep) {
  switch (rep) {
    case MachineRepresentation::kBit:
    case MachineRepresentation::kWord8:
    case MachineRepresentation::kWord16:
      return InstructionSequence::DefaultRepresentation();
    case MachineRepresentation::kWord32:
    case MachineRepresentation::kWord64:
    case MachineRepresentation::kTaggedSigned:
    case MachineRepresentation::kTaggedPointer:
    case MachineRepresentation::kTagged:
    case MachineRepresentation::kFloat32:
    case MachineRepresentation::kFloat64:
    case MachineRepresentation::kSimd128:
    case MachineRepresentation::kCompressedSigned:
    case MachineRepresentation::kCompressedPointer:
    case MachineRepresentation::kCompressed:
      return rep;
    case MachineRepresentation::kNone:
      break;
  }

  UNREACHABLE();
}
918
919MachineRepresentation InstructionSequence::GetRepresentation(
920 int virtual_register) const {
921 DCHECK_LE(0, virtual_register);
922 DCHECK_LT(virtual_register, VirtualRegisterCount());
923 if (virtual_register >= static_cast<int>(representations_.size())) {
924 return DefaultRepresentation();
925 }
926 return representations_[virtual_register];
927}
928
929void InstructionSequence::MarkAsRepresentation(MachineRepresentation rep,
930 int virtual_register) {
931 DCHECK_LE(0, virtual_register);
932 DCHECK_LT(virtual_register, VirtualRegisterCount());
933 if (virtual_register >= static_cast<int>(representations_.size())) {
934 representations_.resize(VirtualRegisterCount(), DefaultRepresentation());
935 }
936 rep = FilterRepresentation(rep);
937 DCHECK_IMPLIES(representations_[virtual_register] != rep,
938 representations_[virtual_register] == DefaultRepresentation());
939 representations_[virtual_register] = rep;
940 representation_mask_ |= RepresentationBit(rep);
941}
942
943int InstructionSequence::AddDeoptimizationEntry(
944 FrameStateDescriptor* descriptor, DeoptimizeKind kind,
945 DeoptimizeReason reason, VectorSlotPair const& feedback) {
946 int deoptimization_id = static_cast<int>(deoptimization_entries_.size());
947 deoptimization_entries_.push_back(
948 DeoptimizationEntry(descriptor, kind, reason, feedback));
949 return deoptimization_id;
950}
951
952DeoptimizationEntry const& InstructionSequence::GetDeoptimizationEntry(
953 int state_id) {
954 return deoptimization_entries_[state_id];
955}
956
957RpoNumber InstructionSequence::InputRpo(Instruction* instr, size_t index) {
958 InstructionOperand* operand = instr->InputAt(index);
959 Constant constant =
960 operand->IsImmediate()
961 ? GetImmediate(ImmediateOperand::cast(operand))
962 : GetConstant(ConstantOperand::cast(operand)->virtual_register());
963 return constant.ToRpoNumber();
964}
965
966bool InstructionSequence::GetSourcePosition(const Instruction* instr,
967 SourcePosition* result) const {
968 auto it = source_positions_.find(instr);
969 if (it == source_positions_.end()) return false;
970 *result = it->second;
971 return true;
972}
973
974void InstructionSequence::SetSourcePosition(const Instruction* instr,
975 SourcePosition value) {
976 source_positions_.insert(std::make_pair(instr, value));
977}
978
979void InstructionSequence::Print() const {
980 StdoutStream{} << *this << std::endl;
981}
982
983void InstructionSequence::PrintBlock(int block_id) const {
984 RpoNumber rpo = RpoNumber::FromInt(block_id);
985 const InstructionBlock* block = InstructionBlockAt(rpo);
986 CHECK(block->rpo_number() == rpo);
987 StdoutStream{} << PrintableInstructionBlock{block, this} << std::endl;
988}
989
// Backing store for RegisterConfigurationForTesting(); remains nullptr until
// SetRegisterConfigurationForTesting() installs a test configuration.
const RegisterConfiguration*
    InstructionSequence::registerConfigurationForTesting_ = nullptr;
992
993const RegisterConfiguration*
994InstructionSequence::RegisterConfigurationForTesting() {
995 DCHECK_NOT_NULL(registerConfigurationForTesting_);
996 return registerConfigurationForTesting_;
997}
998
999void InstructionSequence::SetRegisterConfigurationForTesting(
1000 const RegisterConfiguration* regConfig) {
1001 registerConfigurationForTesting_ = regConfig;
1002 GetRegConfig = InstructionSequence::RegisterConfigurationForTesting;
1003}
1004
// Describes one frame of a deoptimization frame state. All counts are
// stored verbatim; outer_state forms a chain toward the outermost frame
// (nullptr terminates the chain — see GetTotalSize/GetFrameCount below).
FrameStateDescriptor::FrameStateDescriptor(
    Zone* zone, FrameStateType type, BailoutId bailout_id,
    OutputFrameStateCombine state_combine, size_t parameters_count,
    size_t locals_count, size_t stack_count,
    MaybeHandle<SharedFunctionInfo> shared_info,
    FrameStateDescriptor* outer_state)
    : type_(type),
      bailout_id_(bailout_id),
      frame_state_combine_(state_combine),
      parameters_count_(parameters_count),
      locals_count_(locals_count),
      stack_count_(stack_count),
      values_(zone),  // zone-allocated value list, filled in later
      shared_info_(shared_info),
      outer_state_(outer_state) {}
1020
1021size_t FrameStateDescriptor::GetSize() const {
1022 return 1 + parameters_count() + locals_count() + stack_count() +
1023 (HasContext() ? 1 : 0);
1024}
1025
1026size_t FrameStateDescriptor::GetTotalSize() const {
1027 size_t total_size = 0;
1028 for (const FrameStateDescriptor* iter = this; iter != nullptr;
1029 iter = iter->outer_state_) {
1030 total_size += iter->GetSize();
1031 }
1032 return total_size;
1033}
1034
1035size_t FrameStateDescriptor::GetFrameCount() const {
1036 size_t count = 0;
1037 for (const FrameStateDescriptor* iter = this; iter != nullptr;
1038 iter = iter->outer_state_) {
1039 ++count;
1040 }
1041 return count;
1042}
1043
1044size_t FrameStateDescriptor::GetJSFrameCount() const {
1045 size_t count = 0;
1046 for (const FrameStateDescriptor* iter = this; iter != nullptr;
1047 iter = iter->outer_state_) {
1048 if (FrameStateFunctionInfo::IsJSFunctionType(iter->type_)) {
1049 ++count;
1050 }
1051 }
1052 return count;
1053}
1054
1055std::ostream& operator<<(std::ostream& os, const RpoNumber& rpo) {
1056 return os << rpo.ToSize();
1057}
1058
1059std::ostream& operator<<(std::ostream& os, const InstructionSequence& code) {
1060 for (size_t i = 0; i < code.immediates_.size(); ++i) {
1061 Constant constant = code.immediates_[i];
1062 os << "IMM#" << i << ": " << constant << "\n";
1063 }
1064 int i = 0;
1065 for (ConstantMap::const_iterator it = code.constants_.begin();
1066 it != code.constants_.end(); ++i, ++it) {
1067 os << "CST#" << i << ": v" << it->first << " = " << it->second << "\n";
1068 }
1069 for (int i = 0; i < code.InstructionBlockCount(); i++) {
1070 auto* block = code.InstructionBlockAt(RpoNumber::FromInt(i));
1071 os << PrintableInstructionBlock{block, &code};
1072 }
1073 return os;
1074}
1075
1076} // namespace compiler
1077} // namespace internal
1078} // namespace v8
1079