// Copyright 2017 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/compiler/escape-analysis-reducer.h"

#include "src/compiler/all-nodes.h"
#include "src/compiler/simplified-operator.h"
#include "src/compiler/type-cache.h"
#include "src/frame-constants.h"

namespace v8 {
namespace internal {
namespace compiler {

#ifdef DEBUG
#define TRACE(...)                                    \
  do {                                                \
    if (FLAG_trace_turbo_escape) PrintF(__VA_ARGS__); \
  } while (false)
#else
#define TRACE(...)
#endif  // DEBUG

EscapeAnalysisReducer::EscapeAnalysisReducer(
    Editor* editor, JSGraph* jsgraph, EscapeAnalysisResult analysis_result,
    Zone* zone)
    : AdvancedReducer(editor),
      jsgraph_(jsgraph),
      analysis_result_(analysis_result),
      object_id_cache_(zone),
      node_cache_(jsgraph->graph(), zone),
      arguments_elements_(zone),
      zone_(zone) {}

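// Replaces {original} with {replacement}. If {replacement} is dead or refers
// to a non-escaping virtual object, the replacement is unconditional.
// Otherwise, if the replacement's type would widen the original's type, the
// original node is turned into a TypeGuard to preserve type information.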
Reduction EscapeAnalysisReducer::ReplaceNode(Node* original,
                                             Node* replacement) {
  const VirtualObject* vobject =
      analysis_result().GetVirtualObject(replacement);
  if (replacement->opcode() == IrOpcode::kDead ||
      (vobject && !vobject->HasEscaped())) {
    RelaxEffectsAndControls(original);
    return Replace(replacement);
  }
  Type const replacement_type = NodeProperties::GetType(replacement);
  Type const original_type = NodeProperties::GetType(original);
  if (replacement_type.Is(original_type)) {
    RelaxEffectsAndControls(original);
    return Replace(replacement);
  }

  // If we would otherwise widen the type, we need to guard the replacement
  // with a TypeGuard.
  DCHECK_EQ(1, original->op()->EffectOutputCount());
  DCHECK_EQ(1, original->op()->EffectInputCount());
  DCHECK_EQ(1, original->op()->ControlInputCount());
  Node* effect = NodeProperties::GetEffectInput(original);
  Node* control = NodeProperties::GetControlInput(original);
  original->TrimInputCount(0);
  original->AppendInput(jsgraph()->zone(), replacement);
  original->AppendInput(jsgraph()->zone(), effect);
  original->AppendInput(jsgraph()->zone(), control);
  NodeProperties::SetType(
      original,
      Type::Intersect(original_type, replacement_type, jsgraph()->zone()));
  NodeProperties::ChangeOp(original,
                           jsgraph()->common()->TypeGuard(original_type));
  ReplaceWithValue(original, original, original, control);
  return NoChange();
}

namespace {

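// Walks up a chain of TypeGuard nodes and returns the underlying value node.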
Node* SkipTypeGuards(Node* node) {
  while (node->opcode() == IrOpcode::kTypeGuard) {
    node = NodeProperties::GetValueInput(node, 0);
  }
  return node;
}

}  // namespace

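// Returns the ObjectId node for the given virtual object, creating and
// caching it on first use. ObjectId nodes stand in for duplicate occurrences
// of a virtual object within a deoptimization state.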
Node* EscapeAnalysisReducer::ObjectIdNode(const VirtualObject* vobject) {
  VirtualObject::Id id = vobject->id();
  if (id >= object_id_cache_.size()) object_id_cache_.resize(id + 1);
  if (!object_id_cache_[id]) {
    Node* node = jsgraph()->graph()->NewNode(jsgraph()->common()->ObjectId(id));
    NodeProperties::SetType(node, Type::Object());
    object_id_cache_[id] = node;
  }
  return object_id_cache_[id];
}

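// Main entry point of the reducer: applies the results of escape analysis to
// {node}, either by substituting a recorded replacement, by disconnecting a
// non-escaping allocation or region from the effect and control chains, or by
// rewriting the node's frame state inputs.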
Reduction EscapeAnalysisReducer::Reduce(Node* node) {
  if (Node* replacement = analysis_result().GetReplacementOf(node)) {
    DCHECK(node->opcode() != IrOpcode::kAllocate &&
           node->opcode() != IrOpcode::kFinishRegion);
    DCHECK_NE(replacement, node);
    return ReplaceNode(node, replacement);
  }

  switch (node->opcode()) {
    case IrOpcode::kAllocate:
    case IrOpcode::kTypeGuard: {
      const VirtualObject* vobject = analysis_result().GetVirtualObject(node);
      if (vobject && !vobject->HasEscaped()) {
        RelaxEffectsAndControls(node);
      }
      return NoChange();
    }
    case IrOpcode::kFinishRegion: {
      Node* effect = NodeProperties::GetEffectInput(node, 0);
      if (effect->opcode() == IrOpcode::kBeginRegion) {
        RelaxEffectsAndControls(effect);
        RelaxEffectsAndControls(node);
      }
      return NoChange();
    }
    case IrOpcode::kNewArgumentsElements:
      arguments_elements_.insert(node);
      return NoChange();
    default: {
      // TODO(sigurds): Change this to GetFrameStateInputCount once
      // it is working. For now we use EffectInputCount > 0 to determine
      // whether a node might have a frame state input.
      if (node->op()->EffectInputCount() > 0) {
        ReduceFrameStateInputs(node);
      }
      return NoChange();
    }
  }
}

// While doing DFS on the FrameState tree, we have to recognize duplicate
// occurrences of virtual objects.
class Deduplicator {
 public:
  explicit Deduplicator(Zone* zone) : is_duplicate_(zone) {}
  bool SeenBefore(const VirtualObject* vobject) {
    VirtualObject::Id id = vobject->id();
    if (id >= is_duplicate_.size()) {
      is_duplicate_.resize(id + 1);
    }
    bool is_duplicate = is_duplicate_[id];
    is_duplicate_[id] = true;
    return is_duplicate;
  }

 private:
  ZoneVector<bool> is_duplicate_;
};

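// Rewrites the FrameState inputs of {node} so that non-escaping virtual
// objects are described by ObjectState nodes instead of their allocations.
// A fresh Deduplicator per frame state ensures that repeated occurrences of
// the same virtual object collapse into ObjectId references.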
void EscapeAnalysisReducer::ReduceFrameStateInputs(Node* node) {
  DCHECK_GE(node->op()->EffectInputCount(), 1);
  for (int i = 0; i < node->InputCount(); ++i) {
    Node* input = node->InputAt(i);
    if (input->opcode() == IrOpcode::kFrameState) {
      Deduplicator deduplicator(zone());
      if (Node* ret = ReduceDeoptState(input, node, &deduplicator)) {
        node->ReplaceInput(i, ret);
      }
    }
  }
}

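// Recursively rewrites the FrameState/StateValues tree rooted at {node}.
// Non-escaping virtual objects are replaced by ObjectState nodes listing
// their field values as observed at {effect}; duplicate occurrences become
// ObjectId references so the deoptimizer can reconstruct the object graph.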
Node* EscapeAnalysisReducer::ReduceDeoptState(Node* node, Node* effect,
                                              Deduplicator* deduplicator) {
  if (node->opcode() == IrOpcode::kFrameState) {
    NodeHashCache::Constructor new_node(&node_cache_, node);
    // This input order is important to match the DFS traversal used in the
    // instruction selector. Otherwise, the instruction selector might find a
    // duplicate node before the original one.
    for (int input_id : {kFrameStateOuterStateInput, kFrameStateFunctionInput,
                         kFrameStateParametersInput, kFrameStateContextInput,
                         kFrameStateLocalsInput, kFrameStateStackInput}) {
      Node* input = node->InputAt(input_id);
      new_node.ReplaceInput(ReduceDeoptState(input, effect, deduplicator),
                            input_id);
    }
    return new_node.Get();
  } else if (node->opcode() == IrOpcode::kStateValues) {
    NodeHashCache::Constructor new_node(&node_cache_, node);
    for (int i = 0; i < node->op()->ValueInputCount(); ++i) {
      Node* input = NodeProperties::GetValueInput(node, i);
      new_node.ReplaceValueInput(ReduceDeoptState(input, effect, deduplicator),
                                 i);
    }
    return new_node.Get();
  } else if (const VirtualObject* vobject =
                 analysis_result().GetVirtualObject(SkipTypeGuards(node))) {
    if (vobject->HasEscaped()) return node;
    if (deduplicator->SeenBefore(vobject)) {
      return ObjectIdNode(vobject);
    } else {
      std::vector<Node*> inputs;
      for (int offset = 0; offset < vobject->size(); offset += kTaggedSize) {
        Node* field =
            analysis_result().GetVirtualObjectField(vobject, offset, effect);
        CHECK_NOT_NULL(field);
        if (field != jsgraph()->Dead()) {
          inputs.push_back(ReduceDeoptState(field, effect, deduplicator));
        }
      }
      int num_inputs = static_cast<int>(inputs.size());
      NodeHashCache::Constructor new_node(
          &node_cache_,
          jsgraph()->common()->ObjectState(vobject->id(), num_inputs),
          num_inputs, &inputs.front(), NodeProperties::GetType(node));
      return new_node.Get();
    }
  } else {
    return node;
  }
}

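// Verifies that every Allocate node whose virtual object the analysis
// determined to be non-escaping has indeed been removed from the graph.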
void EscapeAnalysisReducer::VerifyReplacement() const {
  AllNodes all(zone(), jsgraph()->graph());
  for (Node* node : all.reachable) {
    if (node->opcode() == IrOpcode::kAllocate) {
      if (const VirtualObject* vobject =
              analysis_result().GetVirtualObject(node)) {
        if (!vobject->HasEscaped()) {
          FATAL("Escape analysis failed to remove node %s#%d\n",
                node->op()->mnemonic(), node->id());
        }
      }
    }
  }
}

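// Lowers the NewArgumentsElements nodes collected during reduction: when the
// elements backing store does not escape (its only value uses are deopt
// states, loads of the length field, and, in the unmapped case, element
// loads), the allocation is replaced by an ArgumentsElementsState and the
// remaining loads are rewritten to read the arguments directly from the
// stack.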
void EscapeAnalysisReducer::Finalize() {
  for (Node* node : arguments_elements_) {
    int mapped_count = NewArgumentsElementsMappedCountOf(node->op());

    Node* arguments_frame = NodeProperties::GetValueInput(node, 0);
    if (arguments_frame->opcode() != IrOpcode::kArgumentsFrame) continue;
    Node* arguments_length = NodeProperties::GetValueInput(node, 1);
    if (arguments_length->opcode() != IrOpcode::kArgumentsLength) continue;

    // If mapped arguments are specified, then their number is always equal to
    // the number of formal parameters. This allows us to use just the
    // three-value {ArgumentsStateType} enum because the deoptimizer can
    // reconstruct the value of {mapped_count} from the number of formal
    // parameters.
    DCHECK_IMPLIES(
        mapped_count != 0,
        mapped_count == FormalParameterCountOf(arguments_length->op()));
    ArgumentsStateType type =
        IsRestLengthOf(arguments_length->op())
            ? ArgumentsStateType::kRestParameter
            : (mapped_count == 0) ? ArgumentsStateType::kUnmappedArguments
                                  : ArgumentsStateType::kMappedArguments;

    Node* arguments_length_state = nullptr;
    for (Edge edge : arguments_length->use_edges()) {
      Node* use = edge.from();
      switch (use->opcode()) {
        case IrOpcode::kObjectState:
        case IrOpcode::kTypedObjectState:
        case IrOpcode::kStateValues:
        case IrOpcode::kTypedStateValues:
          if (!arguments_length_state) {
            arguments_length_state = jsgraph()->graph()->NewNode(
                jsgraph()->common()->ArgumentsLengthState(type));
            NodeProperties::SetType(arguments_length_state,
                                    Type::OtherInternal());
          }
          edge.UpdateTo(arguments_length_state);
          break;
        default:
          break;
      }
    }

    bool escaping_use = false;
    ZoneVector<Node*> loads(zone());
    for (Edge edge : node->use_edges()) {
      Node* use = edge.from();
      if (!NodeProperties::IsValueEdge(edge)) continue;
      if (use->use_edges().empty()) {
        // A node without uses is dead, so we don't have to care about it.
        continue;
      }
      switch (use->opcode()) {
        case IrOpcode::kStateValues:
        case IrOpcode::kTypedStateValues:
        case IrOpcode::kObjectState:
        case IrOpcode::kTypedObjectState:
          break;
        case IrOpcode::kLoadElement:
          if (mapped_count == 0) {
            loads.push_back(use);
          } else {
            escaping_use = true;
          }
          break;
        case IrOpcode::kLoadField:
          if (FieldAccessOf(use->op()).offset == FixedArray::kLengthOffset) {
            loads.push_back(use);
          } else {
            escaping_use = true;
          }
          break;
        default:
          // If the arguments elements node is used by an unhandled node,
          // then we cannot remove this allocation.
          escaping_use = true;
          break;
      }
      if (escaping_use) break;
    }
    if (!escaping_use) {
      Node* arguments_elements_state = jsgraph()->graph()->NewNode(
          jsgraph()->common()->ArgumentsElementsState(type));
      NodeProperties::SetType(arguments_elements_state, Type::OtherInternal());
      ReplaceWithValue(node, arguments_elements_state);

      for (Node* load : loads) {
        switch (load->opcode()) {
          case IrOpcode::kLoadElement: {
            Node* index = NodeProperties::GetValueInput(load, 1);
            // {offset} is a reversed index starting from 1. The base address
            // is adapted to allow offsets starting from 1.
            Node* offset = jsgraph()->graph()->NewNode(
                jsgraph()->simplified()->NumberSubtract(), arguments_length,
                index);
            NodeProperties::SetType(offset,
                                    TypeCache::Get()->kArgumentsLengthType);
            NodeProperties::ReplaceValueInput(load, arguments_frame, 0);
            NodeProperties::ReplaceValueInput(load, offset, 1);
            NodeProperties::ChangeOp(
                load, jsgraph()->simplified()->LoadStackArgument());
            break;
          }
          case IrOpcode::kLoadField: {
            DCHECK_EQ(FieldAccessOf(load->op()).offset,
                      FixedArray::kLengthOffset);
            Node* length = NodeProperties::GetValueInput(node, 1);
            ReplaceWithValue(load, length);
            break;
          }
          default:
            UNREACHABLE();
        }
      }
    }
  }
}

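// Returns the cached node that is structurally equal to {node}, or nullptr
// if none has been inserted yet.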
Node* NodeHashCache::Query(Node* node) {
  auto it = cache_.find(node);
  if (it != cache_.end()) {
    return *it;
  } else {
    return nullptr;
  }
}

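// Starts construction of a node with the given operator, inputs and type.
// Reuses a node from the free list of temporary nodes when one is available
// instead of allocating a fresh one.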
NodeHashCache::Constructor::Constructor(NodeHashCache* cache,
                                        const Operator* op, int input_count,
                                        Node** inputs, Type type)
    : node_cache_(cache), from_(nullptr) {
  if (node_cache_->temp_nodes_.size() > 0) {
    tmp_ = node_cache_->temp_nodes_.back();
    node_cache_->temp_nodes_.pop_back();
    int tmp_input_count = tmp_->InputCount();
    if (input_count <= tmp_input_count) {
      tmp_->TrimInputCount(input_count);
    }
    for (int i = 0; i < input_count; ++i) {
      if (i < tmp_input_count) {
        tmp_->ReplaceInput(i, inputs[i]);
      } else {
        tmp_->AppendInput(node_cache_->graph_->zone(), inputs[i]);
      }
    }
    NodeProperties::ChangeOp(tmp_, op);
  } else {
    tmp_ = node_cache_->graph_->NewNode(op, input_count, inputs);
  }
  NodeProperties::SetType(tmp_, type);
}

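// Finishes construction and returns the canonical node: the cached equivalent
// if one exists (returning the temporary node to the free list), otherwise
// the newly built node, which is then inserted into the cache.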
Node* NodeHashCache::Constructor::Get() {
  DCHECK(tmp_ || from_);
  Node* node;
  if (!tmp_) {
    node = node_cache_->Query(from_);
    if (!node) node = from_;
  } else {
    node = node_cache_->Query(tmp_);
    if (node) {
      node_cache_->temp_nodes_.push_back(tmp_);
    } else {
      node = tmp_;
      node_cache_->Insert(node);
    }
  }
  tmp_ = from_ = nullptr;
  return node;
}

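// Returns the node under construction in a mutable state. If construction
// started from an existing node, a copy of it is materialized first (reusing
// a node from the free list if possible) so the original stays unmodified.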
Node* NodeHashCache::Constructor::MutableNode() {
  DCHECK(tmp_ || from_);
  if (!tmp_) {
    if (node_cache_->temp_nodes_.empty()) {
      tmp_ = node_cache_->graph_->CloneNode(from_);
    } else {
      tmp_ = node_cache_->temp_nodes_.back();
      node_cache_->temp_nodes_.pop_back();
      int from_input_count = from_->InputCount();
      int tmp_input_count = tmp_->InputCount();
      if (from_input_count <= tmp_input_count) {
        tmp_->TrimInputCount(from_input_count);
      }
      for (int i = 0; i < from_input_count; ++i) {
        if (i < tmp_input_count) {
          tmp_->ReplaceInput(i, from_->InputAt(i));
        } else {
          tmp_->AppendInput(node_cache_->graph_->zone(), from_->InputAt(i));
        }
      }
      NodeProperties::SetType(tmp_, NodeProperties::GetType(from_));
      NodeProperties::ChangeOp(tmp_, from_->op());
    }
  }
  return tmp_;
}

#undef TRACE

}  // namespace compiler
}  // namespace internal
}  // namespace v8