// Copyright 2014 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/compiler/backend/register-allocator-verifier.h"

#include "src/bit-vector.h"
#include "src/compiler/backend/instruction.h"
#include "src/ostreams.h"

namespace v8 {
namespace internal {
namespace compiler {

namespace {

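// Returns the total number of operands on an instruction: inputs, outputs,
// and temps.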
size_t OperandCount(const Instruction* instr) {
  return instr->InputCount() + instr->OutputCount() + instr->TempCount();
}

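// Checks that no parallel move has been attached to any of the instruction's
// gap positions yet.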
void VerifyEmptyGaps(const Instruction* instr) {
  for (int i = Instruction::FIRST_GAP_POSITION;
       i <= Instruction::LAST_GAP_POSITION; i++) {
    Instruction::GapPosition inner_pos =
        static_cast<Instruction::GapPosition>(i);
    CHECK_NULL(instr->GetParallelMove(inner_pos));
  }
}

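// Checks that every non-redundant move in the instruction's gaps has an
// allocated (or constant) source and an allocated destination.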
void VerifyAllocatedGaps(const Instruction* instr, const char* caller_info) {
  for (int i = Instruction::FIRST_GAP_POSITION;
       i <= Instruction::LAST_GAP_POSITION; i++) {
    Instruction::GapPosition inner_pos =
        static_cast<Instruction::GapPosition>(i);
    const ParallelMove* moves = instr->GetParallelMove(inner_pos);
    if (moves == nullptr) continue;
    for (const MoveOperands* move : *moves) {
      if (move->IsRedundant()) continue;
      CHECK_WITH_MSG(
          move->source().IsAllocated() || move->source().IsConstant(),
          caller_info);
      CHECK_WITH_MSG(move->destination().IsAllocated(), caller_info);
    }
  }
}

}  // namespace

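// The verifier operates in two phases: the constructor records, for every
// instruction, the constraints the register allocator must honor, and
// VerifyAssignment/VerifyGapMoves later check the allocated code against
// that record.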
RegisterAllocatorVerifier::RegisterAllocatorVerifier(
    Zone* zone, const RegisterConfiguration* config,
    const InstructionSequence* sequence)
    : zone_(zone),
      config_(config),
      sequence_(sequence),
      constraints_(zone),
      assessments_(zone),
      outstanding_assessments_(zone) {
  constraints_.reserve(sequence->instructions().size());
  // TODO(dcarney): model unique constraints.
  // Construct OperandConstraints for all InstructionOperands, eliminating
  // kSameAsFirst along the way.
  for (const Instruction* instr : sequence->instructions()) {
    // All gaps should be totally unallocated at this point.
    VerifyEmptyGaps(instr);
    const size_t operand_count = OperandCount(instr);
    OperandConstraint* op_constraints =
        zone->NewArray<OperandConstraint>(operand_count);
    size_t count = 0;
    for (size_t i = 0; i < instr->InputCount(); ++i, ++count) {
      BuildConstraint(instr->InputAt(i), &op_constraints[count]);
      VerifyInput(op_constraints[count]);
    }
    for (size_t i = 0; i < instr->TempCount(); ++i, ++count) {
      BuildConstraint(instr->TempAt(i), &op_constraints[count]);
      VerifyTemp(op_constraints[count]);
    }
    for (size_t i = 0; i < instr->OutputCount(); ++i, ++count) {
      BuildConstraint(instr->OutputAt(i), &op_constraints[count]);
      if (op_constraints[count].type_ == kSameAsFirst) {
        CHECK_LT(0, instr->InputCount());
        op_constraints[count].type_ = op_constraints[0].type_;
        op_constraints[count].value_ = op_constraints[0].value_;
      }
      VerifyOutput(op_constraints[count]);
    }
    InstructionConstraint instr_constraint = {instr, operand_count,
                                              op_constraints};
    constraints()->push_back(instr_constraint);
  }
}

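// The Verify{Input,Temp,Output} helpers check that each operand class got a
// legal constraint kind: kSameAsFirst must have been rewritten away, temps
// may not be immediates, explicit operands, or constants, and outputs must
// name a valid virtual register.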
void RegisterAllocatorVerifier::VerifyInput(
    const OperandConstraint& constraint) {
  CHECK_NE(kSameAsFirst, constraint.type_);
  if (constraint.type_ != kImmediate && constraint.type_ != kExplicit) {
    CHECK_NE(InstructionOperand::kInvalidVirtualRegister,
             constraint.virtual_register_);
  }
}

void RegisterAllocatorVerifier::VerifyTemp(
    const OperandConstraint& constraint) {
  CHECK_NE(kSameAsFirst, constraint.type_);
  CHECK_NE(kImmediate, constraint.type_);
  CHECK_NE(kExplicit, constraint.type_);
  CHECK_NE(kConstant, constraint.type_);
}

void RegisterAllocatorVerifier::VerifyOutput(
    const OperandConstraint& constraint) {
  CHECK_NE(kImmediate, constraint.type_);
  CHECK_NE(kExplicit, constraint.type_);
  CHECK_NE(InstructionOperand::kInvalidVirtualRegister,
           constraint.virtual_register_);
}

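// After allocation, walks the code again and checks every operand against
// the constraint recorded for it in the constructor.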
void RegisterAllocatorVerifier::VerifyAssignment(const char* caller_info) {
  caller_info_ = caller_info;
  CHECK_EQ(sequence()->instructions().size(), constraints()->size());
  auto instr_it = sequence()->begin();
  for (const auto& instr_constraint : *constraints()) {
    const Instruction* instr = instr_constraint.instruction_;
    // All gaps should be totally allocated at this point.
    VerifyAllocatedGaps(instr, caller_info_);
    const size_t operand_count = instr_constraint.operand_constaints_size_;
    const OperandConstraint* op_constraints =
        instr_constraint.operand_constraints_;
    CHECK_EQ(instr, *instr_it);
    CHECK_EQ(operand_count, OperandCount(instr));
    size_t count = 0;
    for (size_t i = 0; i < instr->InputCount(); ++i, ++count) {
      CheckConstraint(instr->InputAt(i), &op_constraints[count]);
    }
    for (size_t i = 0; i < instr->TempCount(); ++i, ++count) {
      CheckConstraint(instr->TempAt(i), &op_constraints[count]);
    }
    for (size_t i = 0; i < instr->OutputCount(); ++i, ++count) {
      CheckConstraint(instr->OutputAt(i), &op_constraints[count]);
    }
    ++instr_it;
  }
}

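// Lowers a single InstructionOperand to an OperandConstraint, recording the
// constraint kind together with the value needed to check it later (virtual
// register, fixed register/slot index, immediate value, or slot size).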
void RegisterAllocatorVerifier::BuildConstraint(const InstructionOperand* op,
                                                OperandConstraint* constraint) {
  constraint->value_ = kMinInt;
  constraint->virtual_register_ = InstructionOperand::kInvalidVirtualRegister;
  if (op->IsConstant()) {
    constraint->type_ = kConstant;
    constraint->value_ = ConstantOperand::cast(op)->virtual_register();
    constraint->virtual_register_ = constraint->value_;
  } else if (op->IsExplicit()) {
    constraint->type_ = kExplicit;
  } else if (op->IsImmediate()) {
    const ImmediateOperand* imm = ImmediateOperand::cast(op);
    int value = imm->type() == ImmediateOperand::INLINE ? imm->inline_value()
                                                        : imm->indexed_value();
    constraint->type_ = kImmediate;
    constraint->value_ = value;
  } else {
    CHECK(op->IsUnallocated());
    const UnallocatedOperand* unallocated = UnallocatedOperand::cast(op);
    int vreg = unallocated->virtual_register();
    constraint->virtual_register_ = vreg;
    if (unallocated->basic_policy() == UnallocatedOperand::FIXED_SLOT) {
      constraint->type_ = kFixedSlot;
      constraint->value_ = unallocated->fixed_slot_index();
    } else {
      switch (unallocated->extended_policy()) {
        case UnallocatedOperand::REGISTER_OR_SLOT:
        case UnallocatedOperand::NONE:
          if (sequence()->IsFP(vreg)) {
            constraint->type_ = kRegisterOrSlotFP;
          } else {
            constraint->type_ = kRegisterOrSlot;
          }
          break;
        case UnallocatedOperand::REGISTER_OR_SLOT_OR_CONSTANT:
          DCHECK(!sequence()->IsFP(vreg));
          constraint->type_ = kRegisterOrSlotOrConstant;
          break;
        case UnallocatedOperand::FIXED_REGISTER:
          if (unallocated->HasSecondaryStorage()) {
            constraint->type_ = kRegisterAndSlot;
            constraint->spilled_slot_ = unallocated->GetSecondaryStorage();
          } else {
            constraint->type_ = kFixedRegister;
          }
          constraint->value_ = unallocated->fixed_register_index();
          break;
        case UnallocatedOperand::FIXED_FP_REGISTER:
          constraint->type_ = kFixedFPRegister;
          constraint->value_ = unallocated->fixed_register_index();
          break;
        case UnallocatedOperand::MUST_HAVE_REGISTER:
          if (sequence()->IsFP(vreg)) {
            constraint->type_ = kFPRegister;
          } else {
            constraint->type_ = kRegister;
          }
          break;
        case UnallocatedOperand::MUST_HAVE_SLOT:
          constraint->type_ = kSlot;
          constraint->value_ =
              ElementSizeLog2Of(sequence()->GetRepresentation(vreg));
          break;
        case UnallocatedOperand::SAME_AS_FIRST_INPUT:
          constraint->type_ = kSameAsFirst;
          break;
      }
    }
  }
}

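// Checks a post-allocation operand against the constraint recorded for it
// before allocation.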
void RegisterAllocatorVerifier::CheckConstraint(
    const InstructionOperand* op, const OperandConstraint* constraint) {
  switch (constraint->type_) {
    case kConstant:
      CHECK_WITH_MSG(op->IsConstant(), caller_info_);
      CHECK_EQ(ConstantOperand::cast(op)->virtual_register(),
               constraint->value_);
      return;
    case kImmediate: {
      CHECK_WITH_MSG(op->IsImmediate(), caller_info_);
      const ImmediateOperand* imm = ImmediateOperand::cast(op);
      int value = imm->type() == ImmediateOperand::INLINE
                      ? imm->inline_value()
                      : imm->indexed_value();
      CHECK_EQ(value, constraint->value_);
      return;
    }
    case kRegister:
      CHECK_WITH_MSG(op->IsRegister(), caller_info_);
      return;
    case kFPRegister:
      CHECK_WITH_MSG(op->IsFPRegister(), caller_info_);
      return;
    case kExplicit:
      CHECK_WITH_MSG(op->IsExplicit(), caller_info_);
      return;
    case kFixedRegister:
    case kRegisterAndSlot:
      CHECK_WITH_MSG(op->IsRegister(), caller_info_);
      CHECK_EQ(LocationOperand::cast(op)->register_code(), constraint->value_);
      return;
    case kFixedFPRegister:
      CHECK_WITH_MSG(op->IsFPRegister(), caller_info_);
      CHECK_EQ(LocationOperand::cast(op)->register_code(), constraint->value_);
      return;
    case kFixedSlot:
      CHECK_WITH_MSG(op->IsStackSlot() || op->IsFPStackSlot(), caller_info_);
      CHECK_EQ(LocationOperand::cast(op)->index(), constraint->value_);
      return;
    case kSlot:
      CHECK_WITH_MSG(op->IsStackSlot() || op->IsFPStackSlot(), caller_info_);
      CHECK_EQ(ElementSizeLog2Of(LocationOperand::cast(op)->representation()),
               constraint->value_);
      return;
    case kRegisterOrSlot:
      CHECK_WITH_MSG(op->IsRegister() || op->IsStackSlot(), caller_info_);
      return;
    case kRegisterOrSlotFP:
      CHECK_WITH_MSG(op->IsFPRegister() || op->IsFPStackSlot(), caller_info_);
      return;
    case kRegisterOrSlotOrConstant:
      CHECK_WITH_MSG(op->IsRegister() || op->IsStackSlot() || op->IsConstant(),
                     caller_info_);
      return;
    case kSameAsFirst:
      CHECK_WITH_MSG(false, caller_info_);
      return;
  }
}

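// Applies the parallel moves attached to the instruction's START and END gap
// positions to the operand-to-assessment map.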
void BlockAssessments::PerformMoves(const Instruction* instruction) {
  const ParallelMove* first =
      instruction->GetParallelMove(Instruction::GapPosition::START);
  PerformParallelMoves(first);
  const ParallelMove* last =
      instruction->GetParallelMove(Instruction::GapPosition::END);
  PerformParallelMoves(last);
}

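// Applies a single parallel move. All sources are read before any destination
// is written: new assessments are staged in map_for_moves_ and only committed
// to map_ once the whole move has been processed.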
void BlockAssessments::PerformParallelMoves(const ParallelMove* moves) {
  if (moves == nullptr) return;

  CHECK(map_for_moves_.empty());
  for (MoveOperands* move : *moves) {
    if (move->IsEliminated() || move->IsRedundant()) continue;
    auto it = map_.find(move->source());
    // The RHS of a parallel move should already have been assessed.
    CHECK(it != map_.end());
    // The LHS of a parallel move should not have been assigned in this
    // parallel move.
    CHECK(map_for_moves_.find(move->destination()) == map_for_moves_.end());
    // Copy the assessment to the destination.
    map_for_moves_[move->destination()] = it->second;
  }
  for (auto pair : map_for_moves_) {
    map_[pair.first] = pair.second;
  }
  map_for_moves_.clear();
}

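// Forgets the assessments of all registers, e.g. across a call, which
// clobbers registers but leaves stack slots intact.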
void BlockAssessments::DropRegisters() {
  for (auto iterator = map().begin(), end = map().end(); iterator != end;) {
    auto current = iterator;
    ++iterator;
    InstructionOperand op = current->first;
    if (op.IsAnyRegister()) map().erase(current);
  }
}

void BlockAssessments::Print() const {
  StdoutStream os;
  for (const auto pair : map()) {
    const InstructionOperand op = pair.first;
    const Assessment* assessment = pair.second;
    // Use operator<< so we can write the assessment on the same line.
    os << op << " : ";
    if (assessment->kind() == AssessmentKind::Final) {
      os << "v" << FinalAssessment::cast(assessment)->virtual_register();
    } else {
      os << "P";
    }
    os << std::endl;
  }
  os << std::endl;
}

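// Builds the initial assessments for a block: empty for entry blocks, copied
// from the single predecessor when there are no phis, and otherwise Pending
// assessments for every operand live out of any predecessor.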
BlockAssessments* RegisterAllocatorVerifier::CreateForBlock(
    const InstructionBlock* block) {
  RpoNumber current_block_id = block->rpo_number();

  BlockAssessments* ret = new (zone()) BlockAssessments(zone());
  if (block->PredecessorCount() == 0) {
    // TODO(mtrofin): the following check should hold, however, in certain
    // unit tests it is invalidated by the last block. Investigate and
    // normalize the CFG.
    // CHECK_EQ(0, current_block_id.ToInt());
    // The phi size test below is because we can, technically, have phi
    // instructions with one argument. Some tests expose that, too.
  } else if (block->PredecessorCount() == 1 && block->phis().size() == 0) {
    const BlockAssessments* prev_block = assessments_[block->predecessors()[0]];
    ret->CopyFrom(prev_block);
  } else {
    for (RpoNumber pred_id : block->predecessors()) {
      // For every operand coming from any of the predecessors, create an
      // Unfinalized assessment.
      auto iterator = assessments_.find(pred_id);
      if (iterator == assessments_.end()) {
        // This block is the head of a loop, and this predecessor is the
        // loopback arc. Validate that this is indeed a loop case; otherwise
        // the CFG is malformed.
        CHECK(pred_id >= current_block_id);
        CHECK(block->IsLoopHeader());
        continue;
      }
      const BlockAssessments* pred_assessments = iterator->second;
      CHECK_NOT_NULL(pred_assessments);
      for (auto pair : pred_assessments->map()) {
        InstructionOperand operand = pair.first;
        if (ret->map().find(operand) == ret->map().end()) {
          ret->map().insert(std::make_pair(
              operand, new (zone()) PendingAssessment(zone(), block, operand)));
        }
      }
    }
  }
  return ret;
}

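// Resolves a pending assessment: checks that, along every incoming path, the
// operand holds the virtual register the use expects, following phi operands
// where applicable.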
void RegisterAllocatorVerifier::ValidatePendingAssessment(
    RpoNumber block_id, InstructionOperand op,
    const BlockAssessments* current_assessments,
    PendingAssessment* const assessment, int virtual_register) {
  if (assessment->IsAliasOf(virtual_register)) return;

  // When validating a pending assessment, it is possible that some of the
  // assessments for the original operand (the one the assessment was first
  // created for) are also pending. To avoid recursion, we use a work list. To
  // deal with cycles, we keep a set of seen nodes.
  Zone local_zone(zone()->allocator(), ZONE_NAME);
  ZoneQueue<std::pair<const PendingAssessment*, int>> worklist(&local_zone);
  ZoneSet<RpoNumber> seen(&local_zone);
  worklist.push(std::make_pair(assessment, virtual_register));
  seen.insert(block_id);

  while (!worklist.empty()) {
    auto work = worklist.front();
    const PendingAssessment* current_assessment = work.first;
    int current_virtual_register = work.second;
    InstructionOperand current_operand = current_assessment->operand();
    worklist.pop();

    const InstructionBlock* origin = current_assessment->origin();
    CHECK(origin->PredecessorCount() > 1 || origin->phis().size() > 0);

    // Check if the virtual register is a phi first, instead of relying on
    // the incoming assessments. In particular, this handles the case
    // v1 = phi v0 v0, which structurally is identical to v0 having been
    // defined at the top of a diamond, and arriving at the node joining the
    // diamond's branches.
    const PhiInstruction* phi = nullptr;
    for (const PhiInstruction* candidate : origin->phis()) {
      if (candidate->virtual_register() == current_virtual_register) {
        phi = candidate;
        break;
      }
    }

    int op_index = 0;
    for (RpoNumber pred : origin->predecessors()) {
      int expected =
          phi != nullptr ? phi->operands()[op_index] : current_virtual_register;

      ++op_index;
      auto pred_assignment = assessments_.find(pred);
      if (pred_assignment == assessments_.end()) {
        CHECK(origin->IsLoopHeader());
        auto todo_iter = outstanding_assessments_.find(pred);
        DelayedAssessments* set = nullptr;
        if (todo_iter == outstanding_assessments_.end()) {
          set = new (zone()) DelayedAssessments(zone());
          outstanding_assessments_.insert(std::make_pair(pred, set));
        } else {
          set = todo_iter->second;
        }
        set->AddDelayedAssessment(current_operand, expected);
        continue;
      }

      const BlockAssessments* pred_assessments = pred_assignment->second;
      auto found_contribution = pred_assessments->map().find(current_operand);
      CHECK(found_contribution != pred_assessments->map().end());
      Assessment* contribution = found_contribution->second;

      switch (contribution->kind()) {
        case Final:
          CHECK_EQ(FinalAssessment::cast(contribution)->virtual_register(),
                   expected);
          break;
        case Pending: {
          // This happens when one diamond feeds into another, and the inner
          // one is never used other than for carrying the value.
          const PendingAssessment* next = PendingAssessment::cast(contribution);
          if (seen.find(pred) == seen.end()) {
            worklist.push({next, expected});
            seen.insert(pred);
          }
          // Note that we do not want to finalize pending assessments at the
          // beginning of a block - which is the information we'd have
          // available here. This is because this operand may be reused to
          // define duplicate phis.
          break;
        }
      }
    }
  }
  assessment->AddAlias(virtual_register);
}

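// Checks that the operand used by the current instruction holds the expected
// virtual register, resolving any pending assessment first.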
void RegisterAllocatorVerifier::ValidateUse(
    RpoNumber block_id, BlockAssessments* current_assessments,
    InstructionOperand op, int virtual_register) {
  auto iterator = current_assessments->map().find(op);
  // We should have seen this operand before.
  CHECK(iterator != current_assessments->map().end());
  Assessment* assessment = iterator->second;

  switch (assessment->kind()) {
    case Final:
      CHECK_EQ(FinalAssessment::cast(assessment)->virtual_register(),
               virtual_register);
      break;
    case Pending: {
      PendingAssessment* pending = PendingAssessment::cast(assessment);
      ValidatePendingAssessment(block_id, op, current_assessments, pending,
                                virtual_register);
      break;
    }
  }
}

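// Walks all blocks in RPO, simulating the effect of the gap moves on operand
// contents and validating every use and definition along the way.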
void RegisterAllocatorVerifier::VerifyGapMoves() {
  CHECK(assessments_.empty());
  CHECK(outstanding_assessments_.empty());
  const size_t block_count = sequence()->instruction_blocks().size();
  for (size_t block_index = 0; block_index < block_count; ++block_index) {
    const InstructionBlock* block =
        sequence()->instruction_blocks()[block_index];
    BlockAssessments* block_assessments = CreateForBlock(block);

    for (int instr_index = block->code_start(); instr_index < block->code_end();
         ++instr_index) {
      const InstructionConstraint& instr_constraint = constraints_[instr_index];
      const Instruction* instr = instr_constraint.instruction_;
      block_assessments->PerformMoves(instr);

      const OperandConstraint* op_constraints =
          instr_constraint.operand_constraints_;
      size_t count = 0;
      for (size_t i = 0; i < instr->InputCount(); ++i, ++count) {
        if (op_constraints[count].type_ == kImmediate ||
            op_constraints[count].type_ == kExplicit) {
          continue;
        }
        int virtual_register = op_constraints[count].virtual_register_;
        InstructionOperand op = *instr->InputAt(i);
        ValidateUse(block->rpo_number(), block_assessments, op,
                    virtual_register);
      }
      for (size_t i = 0; i < instr->TempCount(); ++i, ++count) {
        block_assessments->Drop(*instr->TempAt(i));
      }
      if (instr->IsCall()) {
        block_assessments->DropRegisters();
      }
      for (size_t i = 0; i < instr->OutputCount(); ++i, ++count) {
        int virtual_register = op_constraints[count].virtual_register_;
        block_assessments->AddDefinition(*instr->OutputAt(i), virtual_register);
        if (op_constraints[count].type_ == kRegisterAndSlot) {
          const AllocatedOperand* reg_op =
              AllocatedOperand::cast(instr->OutputAt(i));
          MachineRepresentation rep = reg_op->representation();
          const AllocatedOperand* stack_op = AllocatedOperand::New(
              zone(), LocationOperand::LocationKind::STACK_SLOT, rep,
              op_constraints[count].spilled_slot_);
          block_assessments->AddDefinition(*stack_op, virtual_register);
        }
      }
    }
    // Now commit the assessments for this block. If there are any delayed
    // assessments, ValidatePendingAssessment should see this block, too.
    assessments_[block->rpo_number()] = block_assessments;

    auto todo_iter = outstanding_assessments_.find(block->rpo_number());
    if (todo_iter == outstanding_assessments_.end()) continue;
    DelayedAssessments* todo = todo_iter->second;
    for (auto pair : todo->map()) {
      InstructionOperand op = pair.first;
      int vreg = pair.second;
      auto found_op = block_assessments->map().find(op);
      CHECK(found_op != block_assessments->map().end());
      switch (found_op->second->kind()) {
        case Final:
          CHECK_EQ(FinalAssessment::cast(found_op->second)->virtual_register(),
                   vreg);
          break;
        case Pending:
          ValidatePendingAssessment(block->rpo_number(), op, block_assessments,
                                    PendingAssessment::cast(found_op->second),
                                    vreg);
          break;
      }
    }
  }
}

}  // namespace compiler
}  // namespace internal
}  // namespace v8