1// Copyright 2015 the V8 project authors. All rights reserved.
2// Use of this source code is governed by a BSD-style license that can be
3// found in the LICENSE file.
4
5#include "src/compiler/backend/live-range-separator.h"
6#include "src/compiler/backend/register-allocator.h"
7
8namespace v8 {
9namespace internal {
10namespace compiler {
11
12#define TRACE(...) \
13 do { \
14 if (FLAG_trace_alloc) PrintF(__VA_ARGS__); \
15 } while (false)
16
17namespace {
18
19void CreateSplinter(TopLevelLiveRange* range, RegisterAllocationData* data,
20 LifetimePosition first_cut, LifetimePosition last_cut) {
21 DCHECK(!range->IsSplinter());
22 // We can ignore ranges that live solely in deferred blocks.
23 // If a range ends right at the end of a deferred block, it is marked by
24 // the range builder as ending at gap start of the next block - since the
25 // end is a position where the variable isn't live. We need to take that
26 // into consideration.
27 LifetimePosition max_allowed_end = last_cut.NextFullStart();
28
29 if (first_cut <= range->Start() && max_allowed_end >= range->End()) {
30 return;
31 }
32
33 LifetimePosition start = Max(first_cut, range->Start());
34 LifetimePosition end = Min(last_cut, range->End());
35
36 if (start < end) {
37 // Ensure the original range has a spill range associated, before it gets
38 // splintered. Splinters will point to it. This way, when attempting to
39 // reuse spill slots of splinters, during allocation, we avoid clobbering
40 // such slots.
41 if (range->MayRequireSpillRange()) {
42 data->CreateSpillRangeForLiveRange(range);
43 }
44 if (range->splinter() == nullptr) {
45 TopLevelLiveRange* splinter =
46 data->NextLiveRange(range->representation());
47 DCHECK_NULL(data->live_ranges()[splinter->vreg()]);
48 data->live_ranges()[splinter->vreg()] = splinter;
49 range->SetSplinter(splinter);
50 }
51 Zone* zone = data->allocation_zone();
52 TRACE("creating splinter %d for range %d between %d and %d\n",
53 range->splinter()->vreg(), range->vreg(), start.ToInstructionIndex(),
54 end.ToInstructionIndex());
55 range->Splinter(start, end, zone);
56 }
57}
58
59void SetSlotUse(TopLevelLiveRange* range) {
60 range->reset_slot_use();
61 for (const UsePosition* pos = range->first_pos();
62 !range->has_slot_use() && pos != nullptr; pos = pos->next()) {
63 if (pos->type() == UsePositionType::kRequiresSlot) {
64 range->register_slot_use(TopLevelLiveRange::SlotUseKind::kGeneralSlotUse);
65 }
66 }
67}
68
// Walks the use intervals of |range| and, for every maximal run of deferred
// blocks the range crosses, splinters that portion off via CreateSplinter.
// Finally re-derives the slot-use flags on both the original range and its
// splinter, since use positions have been redistributed between them.
void SplinterLiveRange(TopLevelLiveRange* range, RegisterAllocationData* data) {
  const InstructionSequence* code = data->code();
  UseInterval* interval = range->first_interval();

  // first_cut/last_cut delimit the deferred region currently being
  // accumulated; both Invalid() means no deferred region is open.
  LifetimePosition first_cut = LifetimePosition::Invalid();
  LifetimePosition last_cut = LifetimePosition::Invalid();

  while (interval != nullptr) {
    // We have to cache these here, as splintering might destroy the original
    // interval below.
    UseInterval* next_interval = interval->next();
    LifetimePosition interval_end = interval->end();
    const InstructionBlock* first_block =
        code->GetInstructionBlock(interval->FirstGapIndex());
    const InstructionBlock* last_block =
        code->GetInstructionBlock(interval->LastGapIndex());
    int first_block_nr = first_block->rpo_number().ToInt();
    int last_block_nr = last_block->rpo_number().ToInt();
    // Scan every block spanned by this interval, in rpo order.
    for (int block_id = first_block_nr; block_id <= last_block_nr; ++block_id) {
      const InstructionBlock* current_block =
          code->InstructionBlockAt(RpoNumber::FromInt(block_id));
      if (current_block->IsDeferred()) {
        if (!first_cut.IsValid()) {
          // Open a new deferred region at this block's first gap.
          first_cut = LifetimePosition::GapFromInstructionIndex(
              current_block->first_instruction_index());
        }
        // We splinter until the last gap in the block. I assume this is done to
        // leave a little range to be allocated by normal register allocation
        // and then use that range to connect when splinters are merged back.
        // This might be done as control flow resolution does not insert moves
        // if two consecutive blocks in rpo order are also consecutive in
        // control flow.
        last_cut = LifetimePosition::GapFromInstructionIndex(
            current_block->last_instruction_index());
      } else {
        // Leaving deferred code: emit the accumulated region, if any.
        if (first_cut.IsValid()) {
          CreateSplinter(range, data, first_cut, last_cut);
          first_cut = LifetimePosition::Invalid();
          last_cut = LifetimePosition::Invalid();
        }
      }
    }
    // If we reach the end of an interval with a first_cut and last_cut set, it
    // means that we can splinter to the end of the interval, as the value dies
    // in this control flow branch or is not live in the next block. In the
    // former case, we won't need to reload the value, so we can splinter to the
    // end of its lifetime. In the latter case, control flow resolution will
    // have to connect blocks anyway, so we can also splinter to the end of the
    // block, too.
    if (first_cut.IsValid()) {
      CreateSplinter(range, data, first_cut, interval_end);
      first_cut = LifetimePosition::Invalid();
      last_cut = LifetimePosition::Invalid();
    }
    interval = next_interval;
  }

  // Redo has_slot_use: splintering moved use positions, so the cached flag
  // on each range may be stale.
  if (range->has_slot_use() && range->splinter() != nullptr) {
    SetSlotUse(range);
    SetSlotUse(range->splinter());
  }
}
132
133} // namespace
134
135void LiveRangeSeparator::Splinter() {
136 size_t virt_reg_count = data()->live_ranges().size();
137 for (size_t vreg = 0; vreg < virt_reg_count; ++vreg) {
138 TopLevelLiveRange* range = data()->live_ranges()[vreg];
139 if (range == nullptr || range->IsEmpty() || range->IsSplinter()) {
140 continue;
141 }
142 int first_instr = range->first_interval()->FirstGapIndex();
143 if (!data()->code()->GetInstructionBlock(first_instr)->IsDeferred()) {
144 SplinterLiveRange(range, data());
145 }
146 }
147}
148
149void LiveRangeMerger::MarkRangesSpilledInDeferredBlocks() {
150 const InstructionSequence* code = data()->code();
151 for (TopLevelLiveRange* top : data()->live_ranges()) {
152 if (top == nullptr || top->IsEmpty() || top->splinter() == nullptr ||
153 top->HasSpillOperand() || !top->splinter()->HasSpillRange()) {
154 continue;
155 }
156
157 LiveRange* child = top;
158 for (; child != nullptr; child = child->next()) {
159 if (child->spilled() ||
160 child->NextSlotPosition(child->Start()) != nullptr) {
161 break;
162 }
163 }
164 if (child == nullptr) {
165 DCHECK(!data()->is_turbo_control_flow_aware_allocation());
166 top->TreatAsSpilledInDeferredBlock(data()->allocation_zone(),
167 code->InstructionBlockCount());
168 }
169 }
170}
171
172void LiveRangeMerger::Merge() {
173 MarkRangesSpilledInDeferredBlocks();
174
175 int live_range_count = static_cast<int>(data()->live_ranges().size());
176 for (int i = 0; i < live_range_count; ++i) {
177 TopLevelLiveRange* range = data()->live_ranges()[i];
178 if (range == nullptr || range->IsEmpty() || !range->IsSplinter()) {
179 continue;
180 }
181 TopLevelLiveRange* splinter_parent = range->splintered_from();
182
183 int to_remove = range->vreg();
184 splinter_parent->Merge(range, data()->allocation_zone());
185 data()->live_ranges()[to_remove] = nullptr;
186 }
187}
188
189#undef TRACE
190
191} // namespace compiler
192} // namespace internal
193} // namespace v8
194