blob: af0ba6a63986dc58004798a2386d348514acf734 [file] [log] [blame]
// Copyright 2015 the V8 project authors. All rights reserved.
2// Use of this source code is governed by a BSD-style license that can be
3// found in the LICENSE file.
4
5#include "src/compiler/escape-analysis.h"
6
7#include <limits>
8
9#include "src/base/flags.h"
10#include "src/bootstrapper.h"
11#include "src/compilation-dependencies.h"
12#include "src/compiler/common-operator.h"
13#include "src/compiler/graph-reducer.h"
14#include "src/compiler/js-operator.h"
15#include "src/compiler/node.h"
16#include "src/compiler/node-matchers.h"
17#include "src/compiler/node-properties.h"
18#include "src/compiler/operator-properties.h"
19#include "src/compiler/simplified-operator.h"
20#include "src/objects-inl.h"
21#include "src/type-cache.h"
22
23namespace v8 {
24namespace internal {
25namespace compiler {
26
// Sentinel alias values. kNotReachable marks nodes the alias-assignment walk
// has not visited; kUntrackable marks reachable nodes that cannot be tracked
// as virtual objects. Real aliases are always < kUntrackable.
const EscapeAnalysis::Alias EscapeAnalysis::kNotReachable =
    std::numeric_limits<Alias>::max();
const EscapeAnalysis::Alias EscapeAnalysis::kUntrackable =
    std::numeric_limits<Alias>::max() - 1;
31
32
// Models the state of one tracked allocation: per-field value nodes plus a
// parallel bit vector recording which fields were materialized as Phi nodes
// by the merge logic. Zone-allocated; copied when virtual states fork at
// effect branch points.
class VirtualObject : public ZoneObject {
 public:
  enum Status { kUntracked = 0, kTracked = 1 };
  // Untracked object (e.g. allocation of unknown size): no field storage.
  VirtualObject(NodeId id, Zone* zone)
      : id_(id),
        status_(kUntracked),
        fields_(zone),
        phi_(zone),
        object_state_(nullptr) {}

  // Shallow copy; field value nodes and the cached object state are shared.
  VirtualObject(const VirtualObject& other)
      : id_(other.id_),
        status_(other.status_),
        fields_(other.fields_),
        phi_(other.phi_),
        object_state_(other.object_state_) {}

  // Tracked object with |field_number| fields, all initially unknown.
  VirtualObject(NodeId id, Zone* zone, size_t field_number)
      : id_(id),
        status_(kTracked),
        fields_(zone),
        phi_(zone),
        object_state_(nullptr) {
    fields_.resize(field_number);
    phi_.resize(field_number, false);
  }

  // Returns the value stored at |offset|, or nullptr if unknown/out of range.
  Node* GetField(size_t offset) {
    if (offset < fields_.size()) {
      return fields_[offset];
    }
    return nullptr;
  }

  // True if the value at |offset| is a Phi created by the merge logic.
  bool IsCreatedPhi(size_t offset) {
    if (offset < phi_.size()) {
      return phi_[offset];
    }
    return false;
  }

  // Stores |node| at |offset|; returns true if the field changed.
  // NOTE: unlike GetField, this does not bounds-check — callers must have
  // sized the object via the constructor or ResizeFields first.
  bool SetField(size_t offset, Node* node, bool created_phi = false) {
    bool changed = fields_[offset] != node || phi_[offset] != created_phi;
    fields_[offset] = node;
    phi_[offset] = created_phi;
    if (changed && FLAG_trace_turbo_escape && node) {
      PrintF("Setting field %zu of #%d to #%d (%s)\n", offset, id(), node->id(),
             node->op()->mnemonic());
    }
    return changed;
  }
  bool IsVirtual() const { return status_ == kTracked; }
  bool IsTracked() const { return status_ != kUntracked; }

  // Raw access for building ObjectState nodes.
  Node** fields_array() { return &fields_.front(); }
  size_t field_count() { return fields_.size(); }
  // Resizes both parallel vectors; returns true if the size changed.
  bool ResizeFields(size_t field_count) {
    if (field_count != fields_.size()) {
      fields_.resize(field_count);
      phi_.resize(field_count);
      return true;
    }
    return false;
  }
  // Forgets all field values (e.g. after an unhandled use may clobber them).
  // Returns true if any field was previously known.
  bool ClearAllFields() {
    bool changed = false;
    for (size_t i = 0; i < fields_.size(); ++i) {
      if (fields_[i] != nullptr) {
        fields_[i] = nullptr;
        changed = true;
      }
      phi_[i] = false;
    }
    return changed;
  }
  bool UpdateFrom(const VirtualObject& other);
  // Cached ObjectState node used when deoptimization needs this object.
  void SetObjectState(Node* node) { object_state_ = node; }
  Node* GetObjectState() const { return object_state_; }

  NodeId id() const { return id_; }
  void id(NodeId id) { id_ = id; }

 private:
  NodeId id_;             // id of the allocation this object models
  Status status_;
  ZoneVector<Node*> fields_;  // current value per field slot (nullptr = unknown)
  ZoneVector<bool> phi_;      // parallel to fields_: value is a merge-created Phi
  Node* object_state_;
};
122
123
124bool VirtualObject::UpdateFrom(const VirtualObject& other) {
125 bool changed = status_ != other.status_;
126 status_ = other.status_;
127 if (fields_.size() != other.fields_.size()) {
128 fields_ = other.fields_;
129 return true;
130 }
131 for (size_t i = 0; i < fields_.size(); ++i) {
132 if (fields_[i] != other.fields_[i]) {
133 changed = true;
134 fields_[i] = other.fields_[i];
135 }
136 }
137 return changed;
138}
139
140
// The per-effect-node analysis state: one VirtualObject slot per alias.
// States are forwarded along effect edges, copied at effect branch points,
// and merged at effect phis. last_changed_ records the node that last
// modified this state, driving the fixpoint iteration.
class VirtualState : public ZoneObject {
 public:
  VirtualState(Zone* zone, size_t size);
  VirtualState(const VirtualState& states);

  VirtualObject* VirtualObjectFromAlias(size_t alias);
  VirtualObject* GetOrCreateTrackedVirtualObject(EscapeAnalysis::Alias alias,
                                                 NodeId id, Zone* zone);
  void SetVirtualObject(EscapeAnalysis::Alias alias, VirtualObject* state);
  void LastChangedAt(Node* node) { last_changed_ = node; }
  Node* GetLastChanged() { return last_changed_; }
  bool UpdateFrom(VirtualState* state, Zone* zone);
  bool MergeFrom(MergeCache* cache, Zone* zone, Graph* graph,
                 CommonOperatorBuilder* common, Node* control);
  size_t size() const { return info_.size(); }

 private:
  ZoneVector<VirtualObject*> info_;  // indexed by alias; nullptr = not tracked
  Node* last_changed_;
};
161
162
// Scratch storage reused across effect-phi merges to avoid per-merge
// allocation: the incoming states, the virtual objects selected from them,
// and the candidate field values for one field slot.
class MergeCache : public ZoneObject {
 public:
  explicit MergeCache(Zone* zone)
      : states_(zone), objects_(zone), fields_(zone) {
    states_.reserve(4);
    objects_.reserve(4);
    fields_.reserve(4);
  }
  ZoneVector<VirtualState*>& states() { return states_; }
  ZoneVector<VirtualObject*>& objects() { return objects_; }
  ZoneVector<Node*>& fields() { return fields_; }
  void Clear() {
    states_.clear();
    objects_.clear();
    fields_.clear();
  }
  size_t LoadVirtualObjectsFromStatesFor(EscapeAnalysis::Alias alias);
  void LoadVirtualObjectsForFieldsFrom(
      VirtualState* state, const ZoneVector<EscapeAnalysis::Alias>& aliases);
  Node* GetFields(size_t pos);

 private:
  ZoneVector<VirtualState*> states_;
  ZoneVector<VirtualObject*> objects_;
  ZoneVector<Node*> fields_;
};
189
190
191size_t MergeCache::LoadVirtualObjectsFromStatesFor(
192 EscapeAnalysis::Alias alias) {
193 objects_.clear();
194 DCHECK_GT(states_.size(), 0u);
195 size_t min = std::numeric_limits<size_t>::max();
196 for (VirtualState* state : states_) {
197 if (VirtualObject* obj = state->VirtualObjectFromAlias(alias)) {
198 objects_.push_back(obj);
199 min = std::min(obj->field_count(), min);
200 }
201 }
202 return min;
203}
204
205
206void MergeCache::LoadVirtualObjectsForFieldsFrom(
207 VirtualState* state, const ZoneVector<EscapeAnalysis::Alias>& aliases) {
208 objects_.clear();
209 size_t max_alias = state->size();
210 for (Node* field : fields_) {
211 EscapeAnalysis::Alias alias = aliases[field->id()];
212 if (alias >= max_alias) continue;
213 if (VirtualObject* obj = state->VirtualObjectFromAlias(alias)) {
214 objects_.push_back(obj);
215 }
216 }
217}
218
219
220Node* MergeCache::GetFields(size_t pos) {
221 fields_.clear();
222 Node* rep = objects_.front()->GetField(pos);
223 for (VirtualObject* obj : objects_) {
224 Node* field = obj->GetField(pos);
225 if (field) {
226 fields_.push_back(field);
227 }
228 if (field != rep) {
229 rep = nullptr;
230 }
231 }
232 return rep;
233}
234
235
// Creates an empty state with |size| alias slots, all initially untracked.
VirtualState::VirtualState(Zone* zone, size_t size)
    : info_(size, nullptr, zone), last_changed_(nullptr) {}
238
239
// Deep-copies |state|: every tracked VirtualObject is cloned into this
// state's zone so that later mutations do not leak into the source state
// (used when forking states at effect branch points).
VirtualState::VirtualState(const VirtualState& state)
    : info_(state.info_.size(), nullptr, state.info_.get_allocator().zone()),
      last_changed_(state.last_changed_) {
  for (size_t i = 0; i < state.info_.size(); ++i) {
    if (state.info_[i]) {
      info_[i] =
          new (info_.get_allocator().zone()) VirtualObject(*state.info_[i]);
    }
  }
}
250
251
// Returns the tracked object for |alias|, or nullptr. |alias| must be < size().
VirtualObject* VirtualState::VirtualObjectFromAlias(size_t alias) {
  return info_[alias];
}
255
256
257VirtualObject* VirtualState::GetOrCreateTrackedVirtualObject(
258 EscapeAnalysis::Alias alias, NodeId id, Zone* zone) {
259 if (VirtualObject* obj = VirtualObjectFromAlias(alias)) {
260 return obj;
261 }
262 VirtualObject* obj = new (zone) VirtualObject(id, zone, 0);
263 SetVirtualObject(alias, obj);
264 return obj;
265}
266
267
// Installs (or clears, with nullptr) the tracked object for |alias|.
void VirtualState::SetVirtualObject(EscapeAnalysis::Alias alias,
                                    VirtualObject* obj) {
  info_[alias] = obj;
}
272
273
274bool VirtualState::UpdateFrom(VirtualState* from, Zone* zone) {
275 bool changed = false;
276 for (EscapeAnalysis::Alias alias = 0; alias < size(); ++alias) {
277 VirtualObject* ls = VirtualObjectFromAlias(alias);
278 VirtualObject* rs = from->VirtualObjectFromAlias(alias);
279
280 if (rs == nullptr) {
281 continue;
282 }
283
284 if (ls == nullptr) {
285 ls = new (zone) VirtualObject(*rs);
286 SetVirtualObject(alias, ls);
287 changed = true;
288 continue;
289 }
290
291 if (FLAG_trace_turbo_escape) {
292 PrintF(" Updating fields of @%d\n", alias);
293 }
294
295 changed = ls->UpdateFrom(*rs) || changed;
296 }
297 return false;
298}
299
300
301namespace {
302
303bool IsEquivalentPhi(Node* node1, Node* node2) {
304 if (node1 == node2) return true;
305 if (node1->opcode() != IrOpcode::kPhi || node2->opcode() != IrOpcode::kPhi ||
306 node1->op()->ValueInputCount() != node2->op()->ValueInputCount()) {
307 return false;
308 }
309 for (int i = 0; i < node1->op()->ValueInputCount(); ++i) {
310 Node* input1 = NodeProperties::GetValueInput(node1, i);
311 Node* input2 = NodeProperties::GetValueInput(node2, i);
312 if (!IsEquivalentPhi(input1, input2)) {
313 return false;
314 }
315 }
316 return true;
317}
318
319
320bool IsEquivalentPhi(Node* phi, ZoneVector<Node*>& inputs) {
321 if (phi->opcode() != IrOpcode::kPhi) return false;
322 if (phi->op()->ValueInputCount() != inputs.size()) {
323 return false;
324 }
325 for (size_t i = 0; i < inputs.size(); ++i) {
326 Node* input = NodeProperties::GetValueInput(phi, static_cast<int>(i));
327 if (!IsEquivalentPhi(input, inputs[i])) {
328 return false;
329 }
330 }
331 return true;
332}
333
334} // namespace
335
336
337Node* EscapeAnalysis::GetReplacementIfSame(ZoneVector<VirtualObject*>& objs) {
338 Node* rep = GetReplacement(objs.front()->id());
339 for (VirtualObject* obj : objs) {
340 if (GetReplacement(obj->id()) != rep) {
341 return nullptr;
342 }
343 }
344 return rep;
345}
346
347
// Merges the states cached in |cache| (the effect-phi inputs) into this
// state. For each alias tracked by *all* incoming states, field values are
// unified; where they disagree, a value Phi is created (or an existing
// merge-created Phi is updated/widened in place). Aliases not tracked by
// every incoming state are dropped. Returns true if this state changed.
bool VirtualState::MergeFrom(MergeCache* cache, Zone* zone, Graph* graph,
                             CommonOperatorBuilder* common, Node* control) {
  DCHECK_GT(cache->states().size(), 0u);
  bool changed = false;
  for (EscapeAnalysis::Alias alias = 0; alias < size(); ++alias) {
    // |fields| is the minimum field count across the incoming objects.
    size_t fields = cache->LoadVirtualObjectsFromStatesFor(alias);
    if (cache->objects().size() == cache->states().size()) {
      if (FLAG_trace_turbo_escape) {
        PrintF("  Merging virtual objects of @%d\n", alias);
      }
      VirtualObject* mergeObject = GetOrCreateTrackedVirtualObject(
          alias, cache->objects().front()->id(), zone);
      changed = mergeObject->ResizeFields(fields) || changed;
      for (size_t i = 0; i < fields; ++i) {
        // GetFields refills cache->fields() with the per-input values and
        // returns the common value if they all agree.
        if (Node* field = cache->GetFields(i)) {
          changed = mergeObject->SetField(i, field) || changed;
          if (FLAG_trace_turbo_escape) {
            PrintF("    Field %zu agree on rep #%d\n", i, field->id());
          }
        } else {
          // Snapshot the input count BEFORE control is appended below.
          int value_input_count = static_cast<int>(cache->fields().size());
          if (cache->fields().size() == cache->objects().size()) {
            // Every input has a value for this field: merge with a Phi.
            Node* rep = mergeObject->GetField(i);
            if (!rep || !mergeObject->IsCreatedPhi(i)) {
              // No reusable Phi yet: build value inputs + control in place.
              cache->fields().push_back(control);
              Node* phi = graph->NewNode(
                  common->Phi(MachineRepresentation::kTagged,
                              value_input_count),
                  value_input_count + 1, &cache->fields().front());
              mergeObject->SetField(i, phi, true);
              if (FLAG_trace_turbo_escape) {
                PrintF("    Creating Phi #%d as merge of", phi->id());
                // NOTE: inner |i| intentionally shadows the field index; it
                // only drives this trace output.
                for (int i = 0; i < value_input_count; i++) {
                  PrintF(" #%d (%s)", cache->fields()[i]->id(),
                         cache->fields()[i]->op()->mnemonic());
                }
                PrintF("\n");
              }
              changed = true;
            } else {
              // Reuse the previously created Phi: update its value inputs,
              // appending any that are missing.
              DCHECK(rep->opcode() == IrOpcode::kPhi);
              for (int n = 0; n < value_input_count; ++n) {
                if (n < rep->op()->ValueInputCount()) {
                  Node* old = NodeProperties::GetValueInput(rep, n);
                  if (old != cache->fields()[n]) {
                    changed = true;
                    NodeProperties::ReplaceValueInput(rep, cache->fields()[n],
                                                      n);
                  }
                } else {
                  changed = true;
                  rep->InsertInput(graph->zone(), n, cache->fields()[n]);
                }
              }
              // If inputs were appended, widen the Phi operator to match.
              if (rep->op()->ValueInputCount() != value_input_count) {
                if (FLAG_trace_turbo_escape) {
                  PrintF("    Widening Phi #%d of arity %d to %d", rep->id(),
                         rep->op()->ValueInputCount(), value_input_count);
                }
                NodeProperties::ChangeOp(
                    rep, common->Phi(MachineRepresentation::kTagged,
                                     value_input_count));
              }
            }
          } else {
            // Some input lacks a value for this field: result is unknown.
            changed = mergeObject->SetField(i, nullptr) || changed;
          }
        }
      }
    } else {
      // Not tracked by every incoming state: stop tracking the alias here.
      SetVirtualObject(alias, nullptr);
    }
  }
  return changed;
}
423
424
// Tracks, per node, whether an allocation is virtual (replaceable) or has
// escaped; consults |object_analysis| for load replacements.
EscapeStatusAnalysis::EscapeStatusAnalysis(EscapeAnalysis* object_analysis,
                                           Graph* graph, Zone* zone)
    : object_analysis_(object_analysis),
      graph_(graph),
      zone_(zone),
      status_(graph->NodeCount(), kUnknown, zone),
      queue_(zone) {}
432
433
EscapeStatusAnalysis::~EscapeStatusAnalysis() {}  // All storage is zone-owned.
435
436
// True if |node| has been classified at all (tracked or escaped).
bool EscapeStatusAnalysis::HasEntry(Node* node) {
  return status_[node->id()] & (kTracked | kEscaped);
}
440
441
// True if |node| is tracked and has not (yet) been marked as escaping.
bool EscapeStatusAnalysis::IsVirtual(Node* node) {
  return (status_[node->id()] & kTracked) && !(status_[node->id()] & kEscaped);
}
445
446
// True if |node| has been marked as escaping.
bool EscapeStatusAnalysis::IsEscaped(Node* node) {
  return status_[node->id()] & kEscaped;
}
450
451
// Nodes the analysis treats as allocation sites.
bool EscapeStatusAnalysis::IsAllocation(Node* node) {
  return node->opcode() == IrOpcode::kAllocate ||
         node->opcode() == IrOpcode::kFinishRegion;
}
456
457
// Marks |node| as escaped (and tracked). Returns true on the first marking.
bool EscapeStatusAnalysis::SetEscaped(Node* node) {
  bool changed = !(status_[node->id()] & kEscaped);
  status_[node->id()] |= kEscaped | kTracked;
  return changed;
}
463
464
// Grows the status table to cover nodes created since construction
// (e.g. merge-created Phis).
void EscapeStatusAnalysis::Resize() {
  status_.resize(graph()->NodeCount(), kUnknown);
}
468
469
// Number of nodes covered by the status table.
size_t EscapeStatusAnalysis::size() { return status_.size(); }
471
472
// Worklist traversal from the graph's End node backwards over input edges.
// Each popped node is processed, marked visited, and its not-yet-seen inputs
// are queued. kOnStack prevents duplicate queue entries; Revisit* may
// re-queue visited nodes, so the loop runs to a fixpoint.
void EscapeStatusAnalysis::Run() {
  Resize();
  queue_.push_back(graph()->end());
  status_[graph()->end()->id()] |= kOnStack;
  while (!queue_.empty()) {
    Node* node = queue_.front();
    queue_.pop_front();
    status_[node->id()] &= ~kOnStack;
    Process(node);
    status_[node->id()] |= kVisited;
    for (Edge edge : node->input_edges()) {
      Node* input = edge.to();
      if (!(status_[input->id()] & (kVisited | kOnStack))) {
        queue_.push_back(input);
        status_[input->id()] |= kOnStack;
      }
    }
  }
}
492
493
494void EscapeStatusAnalysis::RevisitInputs(Node* node) {
495 for (Edge edge : node->input_edges()) {
496 Node* input = edge.to();
497 if (!(status_[input->id()] & kOnStack)) {
498 queue_.push_back(input);
499 status_[input->id()] |= kOnStack;
500 }
501 }
502}
503
504
505void EscapeStatusAnalysis::RevisitUses(Node* node) {
506 for (Edge edge : node->use_edges()) {
507 Node* use = edge.from();
508 if (!(status_[use->id()] & kOnStack)) {
509 queue_.push_back(use);
510 status_[use->id()] |= kOnStack;
511 }
512 }
513}
514
515
516void EscapeStatusAnalysis::Process(Node* node) {
517 switch (node->opcode()) {
518 case IrOpcode::kAllocate:
519 ProcessAllocate(node);
520 break;
521 case IrOpcode::kFinishRegion:
522 ProcessFinishRegion(node);
523 break;
524 case IrOpcode::kStoreField:
525 ProcessStoreField(node);
526 break;
527 case IrOpcode::kStoreElement:
528 ProcessStoreElement(node);
529 break;
530 case IrOpcode::kLoadField:
531 case IrOpcode::kLoadElement: {
532 if (Node* rep = object_analysis_->GetReplacement(node)) {
533 if (IsAllocation(rep) && CheckUsesForEscape(node, rep)) {
534 RevisitInputs(rep);
535 RevisitUses(rep);
536 }
537 }
538 break;
539 }
540 case IrOpcode::kPhi:
541 if (!HasEntry(node)) {
542 status_[node->id()] |= kTracked;
543 if (!IsAllocationPhi(node)) {
544 SetEscaped(node);
545 RevisitUses(node);
546 }
547 }
548 CheckUsesForEscape(node);
549 default:
550 break;
551 }
552}
553
554
555bool EscapeStatusAnalysis::IsAllocationPhi(Node* node) {
556 for (Edge edge : node->input_edges()) {
557 Node* input = edge.to();
558 if (input->opcode() == IrOpcode::kPhi && !IsEscaped(input)) continue;
559 if (IsAllocation(input)) continue;
560 return false;
561 }
562 return true;
563}
564
565
// A value stored into an escaped (or non-allocation) target escapes itself.
void EscapeStatusAnalysis::ProcessStoreField(Node* node) {
  DCHECK_EQ(node->opcode(), IrOpcode::kStoreField);
  Node* to = NodeProperties::GetValueInput(node, 0);
  Node* val = NodeProperties::GetValueInput(node, 1);
  if ((IsEscaped(to) || !IsAllocation(to)) && SetEscaped(val)) {
    RevisitUses(val);
    RevisitInputs(val);
    if (FLAG_trace_turbo_escape) {
      PrintF("Setting #%d (%s) to escaped because of store to field of #%d\n",
             val->id(), val->op()->mnemonic(), to->id());
    }
  }
}
579
580
581void EscapeStatusAnalysis::ProcessStoreElement(Node* node) {
582 DCHECK_EQ(node->opcode(), IrOpcode::kStoreElement);
583 Node* to = NodeProperties::GetValueInput(node, 0);
584 Node* val = NodeProperties::GetValueInput(node, 2);
585 if ((IsEscaped(to) || !IsAllocation(to)) && SetEscaped(val)) {
586 RevisitUses(val);
587 RevisitInputs(val);
588 if (FLAG_trace_turbo_escape) {
589 PrintF("Setting #%d (%s) to escaped because of store to field of #%d\n",
590 val->id(), val->op()->mnemonic(), to->id());
591 }
592 }
593}
594
595
// Tracks an Allocate node. Allocations whose size is not a compile-time
// number escape immediately (their layout is unknown); otherwise escape is
// determined by inspecting the uses.
void EscapeStatusAnalysis::ProcessAllocate(Node* node) {
  DCHECK_EQ(node->opcode(), IrOpcode::kAllocate);
  if (!HasEntry(node)) {
    status_[node->id()] |= kTracked;
    if (FLAG_trace_turbo_escape) {
      PrintF("Created status entry for node #%d (%s)\n", node->id(),
             node->op()->mnemonic());
    }
    NumberMatcher size(node->InputAt(0));
    // The size input is expected to be a NumberConstant (or non-constant),
    // never a raw machine-level constant.
    DCHECK(node->InputAt(0)->opcode() != IrOpcode::kInt32Constant &&
           node->InputAt(0)->opcode() != IrOpcode::kInt64Constant &&
           node->InputAt(0)->opcode() != IrOpcode::kFloat32Constant &&
           node->InputAt(0)->opcode() != IrOpcode::kFloat64Constant);
    if (!size.HasValue() && SetEscaped(node)) {
      RevisitUses(node);
      if (FLAG_trace_turbo_escape) {
        PrintF("Setting #%d to escaped because of non-const alloc\n",
               node->id());
      }
      // This node is known to escape, uses do not have to be checked.
      return;
    }
  }
  // phi_escaping=true: a use by a phi makes the allocation escape.
  if (CheckUsesForEscape(node, true)) {
    RevisitUses(node);
  }
}
623
624
625bool EscapeStatusAnalysis::CheckUsesForEscape(Node* uses, Node* rep,
626 bool phi_escaping) {
627 for (Edge edge : uses->use_edges()) {
628 Node* use = edge.from();
629 if (edge.index() >= use->op()->ValueInputCount() +
630 OperatorProperties::GetContextInputCount(use->op()))
631 continue;
632 switch (use->opcode()) {
633 case IrOpcode::kPhi:
634 if (phi_escaping && SetEscaped(rep)) {
635 if (FLAG_trace_turbo_escape) {
636 PrintF(
637 "Setting #%d (%s) to escaped because of use by phi node "
638 "#%d (%s)\n",
639 rep->id(), rep->op()->mnemonic(), use->id(),
640 use->op()->mnemonic());
641 }
642 return true;
643 }
644 // Fallthrough.
645 case IrOpcode::kStoreField:
646 case IrOpcode::kLoadField:
647 case IrOpcode::kStoreElement:
648 case IrOpcode::kLoadElement:
649 case IrOpcode::kFrameState:
650 case IrOpcode::kStateValues:
651 case IrOpcode::kReferenceEqual:
652 case IrOpcode::kFinishRegion:
653 if (IsEscaped(use) && SetEscaped(rep)) {
654 if (FLAG_trace_turbo_escape) {
655 PrintF(
656 "Setting #%d (%s) to escaped because of use by escaping node "
657 "#%d (%s)\n",
658 rep->id(), rep->op()->mnemonic(), use->id(),
659 use->op()->mnemonic());
660 }
661 return true;
662 }
663 break;
664 case IrOpcode::kObjectIsSmi:
665 if (!IsAllocation(rep) && SetEscaped(rep)) {
666 PrintF("Setting #%d (%s) to escaped because of use by #%d (%s)\n",
667 rep->id(), rep->op()->mnemonic(), use->id(),
668 use->op()->mnemonic());
669 return true;
670 }
671 break;
672 default:
673 if (use->op()->EffectInputCount() == 0 &&
674 uses->op()->EffectInputCount() > 0) {
675 PrintF("Encountered unaccounted use by #%d (%s)\n", use->id(),
676 use->op()->mnemonic());
677 UNREACHABLE();
678 }
679 if (SetEscaped(rep)) {
680 if (FLAG_trace_turbo_escape) {
681 PrintF("Setting #%d (%s) to escaped because of use by #%d (%s)\n",
682 rep->id(), rep->op()->mnemonic(), use->id(),
683 use->op()->mnemonic());
684 }
685 return true;
686 }
687 }
688 }
689 return false;
690}
691
692
// Tracks a FinishRegion node (treated as an allocation alias) and checks
// whether any use forces it to escape, propagating back to its inputs.
void EscapeStatusAnalysis::ProcessFinishRegion(Node* node) {
  DCHECK_EQ(node->opcode(), IrOpcode::kFinishRegion);
  if (!HasEntry(node)) {
    status_[node->id()] |= kTracked;
    RevisitUses(node);
  }
  if (CheckUsesForEscape(node, true)) {
    RevisitInputs(node);
  }
}
703
704
// Dumps the escaped/virtual classification of every tracked node.
void EscapeStatusAnalysis::DebugPrint() {
  for (NodeId id = 0; id < status_.size(); id++) {
    if (status_[id] & kTracked) {
      PrintF("Node #%d is %s\n", id,
             (status_[id] & kEscaped) ? "escaping" : "virtual");
    }
  }
}
713
714
// Sets up the object analysis; the embedded EscapeStatusAnalysis consults
// this instance for load replacements during its own pass.
EscapeAnalysis::EscapeAnalysis(Graph* graph, CommonOperatorBuilder* common,
                               Zone* zone)
    : graph_(graph),
      common_(common),
      zone_(zone),
      virtual_states_(zone),
      replacements_(zone),
      escape_status_(this, graph, zone),
      cache_(new (zone) MergeCache(zone)),
      aliases_(zone),
      next_free_alias_(0) {}
726
727
EscapeAnalysis::~EscapeAnalysis() {}  // All storage is zone-owned.
729
730
// Runs the full analysis: assign aliases to allocation sites, propagate
// virtual object states through the effect chain, then compute escape
// status.
void EscapeAnalysis::Run() {
  replacements_.resize(graph()->NodeCount());
  AssignAliases();
  RunObjectAnalysis();
  escape_status_.Run();
}
737
738
739void EscapeAnalysis::AssignAliases() {
740 ZoneVector<Node*> stack(zone());
741 stack.push_back(graph()->end());
742 CHECK_LT(graph()->NodeCount(), kUntrackable);
743 aliases_.resize(graph()->NodeCount(), kNotReachable);
744 aliases_[graph()->end()->id()] = kUntrackable;
745 while (!stack.empty()) {
746 Node* node = stack.back();
747 stack.pop_back();
748 switch (node->opcode()) {
749 case IrOpcode::kAllocate:
750 if (aliases_[node->id()] >= kUntrackable) {
751 aliases_[node->id()] = NextAlias();
752 }
753 break;
754 case IrOpcode::kFinishRegion: {
755 Node* allocate = NodeProperties::GetValueInput(node, 0);
756 if (allocate->opcode() == IrOpcode::kAllocate) {
757 if (aliases_[allocate->id()] >= kUntrackable) {
758 if (aliases_[allocate->id()] == kNotReachable) {
759 stack.push_back(allocate);
760 }
761 aliases_[allocate->id()] = NextAlias();
762 }
763 aliases_[node->id()] = aliases_[allocate->id()];
764 } else {
765 aliases_[node->id()] = NextAlias();
766 }
767 break;
768 }
769 default:
770 DCHECK_EQ(aliases_[node->id()], kUntrackable);
771 break;
772 }
773 for (Edge edge : node->input_edges()) {
774 Node* input = edge.to();
775 if (aliases_[input->id()] == kNotReachable) {
776 stack.push_back(input);
777 aliases_[input->id()] = kUntrackable;
778 }
779 }
780 }
781
782 if (FLAG_trace_turbo_escape) {
783 PrintF("Discovered trackable nodes");
784 for (EscapeAnalysis::Alias id = 0; id < graph()->NodeCount(); ++id) {
785 if (aliases_[id] < kUntrackable) {
786 if (FLAG_trace_turbo_escape) {
787 PrintF(" #%u", id);
788 }
789 }
790 }
791 PrintF("\n");
792 }
793}
794
795
// Forward dataflow pass over the effect chain starting at Start. When
// processing a node changed its state, its effect uses are pushed; dangling
// loads are pushed last (popped first from the LIFO stack) so their state
// is computed before other successors overwrite it.
void EscapeAnalysis::RunObjectAnalysis() {
  virtual_states_.resize(graph()->NodeCount());
  ZoneVector<Node*> stack(zone());
  stack.push_back(graph()->start());
  while (!stack.empty()) {
    Node* node = stack.back();
    stack.pop_back();
    if (aliases_[node->id()] != kNotReachable && Process(node)) {
      for (Edge edge : node->use_edges()) {
        if (NodeProperties::IsEffectEdge(edge)) {
          Node* use = edge.from();
          if ((use->opcode() != IrOpcode::kLoadField &&
               use->opcode() != IrOpcode::kLoadElement) ||
              !IsDanglingEffectNode(use)) {
            stack.push_back(use);
          }
        }
      }
      // First process loads: dangling loads are a problem otherwise.
      for (Edge edge : node->use_edges()) {
        if (NodeProperties::IsEffectEdge(edge)) {
          Node* use = edge.from();
          if ((use->opcode() == IrOpcode::kLoadField ||
               use->opcode() == IrOpcode::kLoadElement) &&
              IsDanglingEffectNode(use)) {
            stack.push_back(use);
          }
        }
      }
    }
  }
  if (FLAG_trace_turbo_escape) {
    DebugPrint();
  }
}
831
832
833bool EscapeAnalysis::IsDanglingEffectNode(Node* node) {
834 if (node->op()->EffectInputCount() == 0) return false;
835 if (node->op()->EffectOutputCount() == 0) return false;
836 if (node->op()->EffectInputCount() == 1 &&
837 NodeProperties::GetEffectInput(node)->opcode() == IrOpcode::kStart) {
838 // The start node is used as sentinel for nodes that are in general
839 // effectful, but of which an analysis has determined that they do not
840 // produce effects in this instance. We don't consider these nodes dangling.
841 return false;
842 }
843 for (Edge edge : node->use_edges()) {
844 if (NodeProperties::IsEffectEdge(edge)) {
845 return false;
846 }
847 }
848 return true;
849}
850
851
852bool EscapeAnalysis::Process(Node* node) {
853 switch (node->opcode()) {
854 case IrOpcode::kAllocate:
855 ProcessAllocation(node);
856 break;
857 case IrOpcode::kBeginRegion:
858 ForwardVirtualState(node);
859 break;
860 case IrOpcode::kFinishRegion:
861 ProcessFinishRegion(node);
862 break;
863 case IrOpcode::kStoreField:
864 ProcessStoreField(node);
865 break;
866 case IrOpcode::kLoadField:
867 ProcessLoadField(node);
868 break;
869 case IrOpcode::kStoreElement:
870 ProcessStoreElement(node);
871 break;
872 case IrOpcode::kLoadElement:
873 ProcessLoadElement(node);
874 break;
875 case IrOpcode::kStart:
876 ProcessStart(node);
877 break;
878 case IrOpcode::kEffectPhi:
879 return ProcessEffectPhi(node);
880 break;
881 default:
882 if (node->op()->EffectInputCount() > 0) {
883 ForwardVirtualState(node);
884 }
885 ProcessAllocationUsers(node);
886 break;
887 }
888 return true;
889}
890
891
892void EscapeAnalysis::ProcessAllocationUsers(Node* node) {
893 for (Edge edge : node->input_edges()) {
894 Node* input = edge.to();
895 if (!NodeProperties::IsValueEdge(edge) &&
896 !NodeProperties::IsContextEdge(edge))
897 continue;
898 switch (node->opcode()) {
899 case IrOpcode::kStoreField:
900 case IrOpcode::kLoadField:
901 case IrOpcode::kStoreElement:
902 case IrOpcode::kLoadElement:
903 case IrOpcode::kFrameState:
904 case IrOpcode::kStateValues:
905 case IrOpcode::kReferenceEqual:
906 case IrOpcode::kFinishRegion:
907 case IrOpcode::kPhi:
908 break;
909 default:
910 VirtualState* state = virtual_states_[node->id()];
911 if (VirtualObject* obj = ResolveVirtualObject(state, input)) {
912 if (obj->ClearAllFields()) {
913 state->LastChangedAt(node);
914 }
915 }
916 break;
917 }
918 }
919}
920
921
922bool EscapeAnalysis::IsEffectBranchPoint(Node* node) {
923 int count = 0;
924 for (Edge edge : node->use_edges()) {
925 if (NodeProperties::IsEffectEdge(edge)) {
926 if (++count > 1) {
927 return true;
928 }
929 }
930 }
931 return false;
932}
933
934
935void EscapeAnalysis::ForwardVirtualState(Node* node) {
936 DCHECK_EQ(node->op()->EffectInputCount(), 1);
937 if (node->opcode() != IrOpcode::kLoadField &&
938 node->opcode() != IrOpcode::kLoadElement &&
939 node->opcode() != IrOpcode::kLoad && IsDanglingEffectNode(node)) {
940 PrintF("Dangeling effect node: #%d (%s)\n", node->id(),
941 node->op()->mnemonic());
942 UNREACHABLE();
943 }
944 Node* effect = NodeProperties::GetEffectInput(node);
945 // Break the cycle for effect phis.
946 if (effect->opcode() == IrOpcode::kEffectPhi) {
947 if (virtual_states_[effect->id()] == nullptr) {
948 virtual_states_[effect->id()] =
949 new (zone()) VirtualState(zone(), AliasCount());
950 }
951 }
952 DCHECK_NOT_NULL(virtual_states_[effect->id()]);
953 if (IsEffectBranchPoint(effect)) {
954 if (FLAG_trace_turbo_escape) {
955 PrintF("Copying object state %p from #%d (%s) to #%d (%s)\n",
956 static_cast<void*>(virtual_states_[effect->id()]), effect->id(),
957 effect->op()->mnemonic(), node->id(), node->op()->mnemonic());
958 }
959 if (!virtual_states_[node->id()]) {
960 virtual_states_[node->id()] =
961 new (zone()) VirtualState(*virtual_states_[effect->id()]);
962 } else {
963 virtual_states_[node->id()]->UpdateFrom(virtual_states_[effect->id()],
964 zone());
965 }
966 } else {
967 virtual_states_[node->id()] = virtual_states_[effect->id()];
968 if (FLAG_trace_turbo_escape) {
969 PrintF("Forwarding object state %p from #%d (%s) to #%d (%s)\n",
970 static_cast<void*>(virtual_states_[effect->id()]), effect->id(),
971 effect->op()->mnemonic(), node->id(), node->op()->mnemonic());
972 }
973 }
974}
975
976
// The Start node seeds the analysis with an empty state (no tracked objects).
void EscapeAnalysis::ProcessStart(Node* node) {
  DCHECK_EQ(node->opcode(), IrOpcode::kStart);
  virtual_states_[node->id()] = new (zone()) VirtualState(zone(), AliasCount());
}
981
982
// Merges the states of all effect inputs into the phi's own state.
// Returns true if anything changed (which keeps the fixpoint iteration
// going); the LastChangedAt bookkeeping ensures a phi reports "changed"
// at least once after any predecessor updated the shared state.
bool EscapeAnalysis::ProcessEffectPhi(Node* node) {
  DCHECK_EQ(node->opcode(), IrOpcode::kEffectPhi);
  bool changed = false;

  VirtualState* mergeState = virtual_states_[node->id()];
  if (!mergeState) {
    mergeState = new (zone()) VirtualState(zone(), AliasCount());
    virtual_states_[node->id()] = mergeState;
    changed = true;
    if (FLAG_trace_turbo_escape) {
      PrintF("Effect Phi #%d got new states map %p.\n", node->id(),
             static_cast<void*>(mergeState));
    }
  } else if (mergeState->GetLastChanged() != node) {
    changed = true;
  }

  cache_->Clear();

  if (FLAG_trace_turbo_escape) {
    PrintF("At Effect Phi #%d, merging states into %p:", node->id(),
           static_cast<void*>(mergeState));
  }

  // Collect the (already computed) states of the effect inputs; inputs not
  // yet processed contribute nothing this round.
  for (int i = 0; i < node->op()->EffectInputCount(); ++i) {
    Node* input = NodeProperties::GetEffectInput(node, i);
    VirtualState* state = virtual_states_[input->id()];
    if (state) {
      cache_->states().push_back(state);
    }
    if (FLAG_trace_turbo_escape) {
      PrintF(" %p (from %d %s)", static_cast<void*>(state), input->id(),
             input->op()->mnemonic());
    }
  }
  if (FLAG_trace_turbo_escape) {
    PrintF("\n");
  }

  if (cache_->states().size() == 0) {
    return changed;
  }

  changed = mergeState->MergeFrom(cache_, zone(), graph(), common(),
                                  NodeProperties::GetControlInput(node)) ||
            changed;

  if (FLAG_trace_turbo_escape) {
    PrintF("Merge %s the node.\n", changed ? "changed" : "did not change");
  }

  if (changed) {
    mergeState->LastChangedAt(node);
    // Merging may have created new Phi nodes; grow the status table.
    escape_status_.Resize();
  }
  return changed;
}
1040
1041
// Creates the VirtualObject for an Allocate node in its state. Allocations
// with a compile-time numeric size become tracked objects with size /
// kPointerSize field slots; others become untracked placeholders.
void EscapeAnalysis::ProcessAllocation(Node* node) {
  DCHECK_EQ(node->opcode(), IrOpcode::kAllocate);
  ForwardVirtualState(node);

  // Check if we have already processed this node.
  if (virtual_states_[node->id()]->VirtualObjectFromAlias(
          aliases_[node->id()])) {
    return;
  }

  NumberMatcher size(node->InputAt(0));
  // The size input must be a NumberConstant (or non-constant), never a raw
  // machine-level constant.
  DCHECK(node->InputAt(0)->opcode() != IrOpcode::kInt32Constant &&
         node->InputAt(0)->opcode() != IrOpcode::kInt64Constant &&
         node->InputAt(0)->opcode() != IrOpcode::kFloat32Constant &&
         node->InputAt(0)->opcode() != IrOpcode::kFloat64Constant);
  if (size.HasValue()) {
    virtual_states_[node->id()]->SetVirtualObject(
        aliases_[node->id()],
        new (zone())
            VirtualObject(node->id(), zone(), size.Value() / kPointerSize));
  } else {
    virtual_states_[node->id()]->SetVirtualObject(
        aliases_[node->id()], new (zone()) VirtualObject(node->id(), zone()));
  }
  virtual_states_[node->id()]->LastChangedAt(node);
}
1068
1069
// Links a FinishRegion node to the VirtualObject of the allocation it wraps
// so both aliases resolve to the same object.
void EscapeAnalysis::ProcessFinishRegion(Node* node) {
  DCHECK_EQ(node->opcode(), IrOpcode::kFinishRegion);
  ForwardVirtualState(node);
  Node* allocation = NodeProperties::GetValueInput(node, 0);
  if (allocation->opcode() == IrOpcode::kAllocate) {
    VirtualState* state = virtual_states_[node->id()];
    if (!state->VirtualObjectFromAlias(aliases_[node->id()])) {
      VirtualObject* vobj_alloc =
          state->VirtualObjectFromAlias(aliases_[allocation->id()]);
      DCHECK_NOT_NULL(vobj_alloc);
      state->SetVirtualObject(aliases_[node->id()], vobj_alloc);
      if (FLAG_trace_turbo_escape) {
        PrintF("Linked finish region node #%d to node #%d\n", node->id(),
               allocation->id());
      }
      state->LastChangedAt(node);
    }
  }
}
1089
1090
// Direct (one-step) replacement for |id|, or nullptr. Bounds-checked because
// merging can create nodes beyond the table's original size.
Node* EscapeAnalysis::replacement(NodeId id) {
  if (id >= replacements_.size()) return nullptr;
  return replacements_[id];
}
1095
1096
// Convenience overload of the one-step replacement lookup.
Node* EscapeAnalysis::replacement(Node* node) {
  return replacement(node->id());
}
1100
1101
// Records |rep| (possibly nullptr) as |node|'s replacement; returns true if
// the entry changed.
bool EscapeAnalysis::SetReplacement(Node* node, Node* rep) {
  bool changed = replacements_[node->id()] != rep;
  replacements_[node->id()] = rep;
  return changed;
}
1107
1108
1109bool EscapeAnalysis::UpdateReplacement(VirtualState* state, Node* node,
1110 Node* rep) {
1111 if (SetReplacement(node, rep)) {
1112 state->LastChangedAt(node);
1113 if (FLAG_trace_turbo_escape) {
1114 if (rep) {
1115 PrintF("Replacement of #%d is #%d (%s)\n", node->id(), rep->id(),
1116 rep->op()->mnemonic());
1117 } else {
1118 PrintF("Replacement of #%d cleared\n", node->id());
1119 }
1120 }
1121 return true;
1122 }
1123 return false;
1124}
1125
1126
1127Node* EscapeAnalysis::ResolveReplacement(Node* node) {
1128 while (replacement(node)) {
1129 node = replacement(node);
1130 }
1131 return node;
1132}
1133
1134
1135Node* EscapeAnalysis::GetReplacement(Node* node) {
1136 return GetReplacement(node->id());
1137}
1138
1139
1140Node* EscapeAnalysis::GetReplacement(NodeId id) {
1141 Node* node = nullptr;
1142 while (replacement(id)) {
1143 node = replacement(id);
1144 id = node->id();
1145 }
1146 return node;
1147}
1148
1149
1150bool EscapeAnalysis::IsVirtual(Node* node) {
1151 if (node->id() >= escape_status_.size()) {
1152 return false;
1153 }
1154 return escape_status_.IsVirtual(node);
1155}
1156
1157
1158bool EscapeAnalysis::IsEscaped(Node* node) {
1159 if (node->id() >= escape_status_.size()) {
1160 return false;
1161 }
1162 return escape_status_.IsEscaped(node);
1163}
1164
1165
// Marks |node| as escaped in the status tracker.  The boolean result comes
// straight from EscapeStatusAnalysis::SetEscaped; callers use it to guard
// one-time trace output, so it presumably reports whether the status
// actually changed — confirm against EscapeStatusAnalysis.
bool EscapeAnalysis::SetEscaped(Node* node) {
  return escape_status_.SetEscaped(node);
}
1169
1170
1171VirtualObject* EscapeAnalysis::GetVirtualObject(Node* at, NodeId id) {
1172 if (VirtualState* states = virtual_states_[at->id()]) {
1173 return states->VirtualObjectFromAlias(aliases_[id]);
1174 }
1175 return nullptr;
1176}
1177
1178
1179VirtualObject* EscapeAnalysis::ResolveVirtualObject(VirtualState* state,
1180 Node* node) {
1181 VirtualObject* obj = GetVirtualObject(state, ResolveReplacement(node));
1182 while (obj && replacement(obj->id())) {
1183 if (VirtualObject* next = GetVirtualObject(state, replacement(obj->id()))) {
1184 obj = next;
1185 } else {
1186 break;
1187 }
1188 }
1189 return obj;
1190}
1191
1192
1193bool EscapeAnalysis::CompareVirtualObjects(Node* left, Node* right) {
1194 DCHECK(IsVirtual(left) && IsVirtual(right));
1195 left = ResolveReplacement(left);
1196 right = ResolveReplacement(right);
1197 if (IsEquivalentPhi(left, right)) {
1198 return true;
1199 }
1200 return false;
1201}
1202
1203
1204int EscapeAnalysis::OffsetFromAccess(Node* node) {
1205 DCHECK(OpParameter<FieldAccess>(node).offset % kPointerSize == 0);
1206 return OpParameter<FieldAccess>(node).offset / kPointerSize;
1207}
1208
1209
// Handles a load at |offset| whose object input |from| is a phi.  If every
// value input of the phi maps to a virtual object that has a field value at
// |offset|, the load is replaced by a new phi over those field values.  If
// any object or field is missing, nothing is changed (only trace output).
void EscapeAnalysis::ProcessLoadFromPhi(int offset, Node* from, Node* node,
                                        VirtualState* state) {
  if (FLAG_trace_turbo_escape) {
    PrintF("Load #%d from phi #%d", node->id(), from->id());
  }

  // Collect the phi's value inputs into the shared cache buffer.
  cache_->fields().clear();
  for (int i = 0; i < node->op()->ValueInputCount(); ++i) {
    Node* input = NodeProperties::GetValueInput(node, i);
    cache_->fields().push_back(input);
  }

  // Map each collected input to its virtual object in |state|; a size
  // mismatch below means some input had no virtual object.
  cache_->LoadVirtualObjectsForFieldsFrom(state, aliases_);
  if (cache_->objects().size() == cache_->fields().size()) {
    // Replace the cached nodes by each object's field value at |offset|;
    // again a size mismatch signals a missing field value.
    cache_->GetFields(offset);
    if (cache_->fields().size() == cache_->objects().size()) {
      Node* rep = replacement(node);
      if (!rep || !IsEquivalentPhi(rep, cache_->fields())) {
        // Build a new phi over the field values, using the control input of
        // the original phi |from| as the merge point.
        int value_input_count = static_cast<int>(cache_->fields().size());
        cache_->fields().push_back(NodeProperties::GetControlInput(from));
        Node* phi = graph()->NewNode(
            common()->Phi(MachineRepresentation::kTagged, value_input_count),
            value_input_count + 1, &cache_->fields().front());
        // The new node needs a slot in the escape status table.
        escape_status_.Resize();
        SetReplacement(node, phi);
        state->LastChangedAt(node);
        if (FLAG_trace_turbo_escape) {
          PrintF(" got phi created.\n");
        }
      } else if (FLAG_trace_turbo_escape) {
        PrintF(" has already phi #%d.\n", rep->id());
      }
    } else if (FLAG_trace_turbo_escape) {
      PrintF(" has incomplete field info.\n");
    }
  } else if (FLAG_trace_turbo_escape) {
    PrintF(" has incomplete virtual object info.\n");
  }
}
1249
1250
1251void EscapeAnalysis::ProcessLoadField(Node* node) {
1252 DCHECK_EQ(node->opcode(), IrOpcode::kLoadField);
1253 ForwardVirtualState(node);
1254 Node* from = NodeProperties::GetValueInput(node, 0);
1255 VirtualState* state = virtual_states_[node->id()];
1256 if (VirtualObject* object = ResolveVirtualObject(state, from)) {
1257 int offset = OffsetFromAccess(node);
1258 if (!object->IsTracked()) return;
1259 Node* value = object->GetField(offset);
1260 if (value) {
1261 value = ResolveReplacement(value);
1262 }
1263 // Record that the load has this alias.
1264 UpdateReplacement(state, node, value);
1265 } else {
1266 if (from->opcode() == IrOpcode::kPhi &&
1267 OpParameter<FieldAccess>(node).offset % kPointerSize == 0) {
1268 int offset = OffsetFromAccess(node);
1269 // Only binary phis are supported for now.
1270 ProcessLoadFromPhi(offset, from, node, state);
1271 }
1272 }
1273}
1274
1275
1276void EscapeAnalysis::ProcessLoadElement(Node* node) {
1277 DCHECK_EQ(node->opcode(), IrOpcode::kLoadElement);
1278 ForwardVirtualState(node);
1279 Node* from = NodeProperties::GetValueInput(node, 0);
1280 VirtualState* state = virtual_states_[node->id()];
1281 Node* index_node = node->InputAt(1);
1282 NumberMatcher index(index_node);
1283 DCHECK(index_node->opcode() != IrOpcode::kInt32Constant &&
1284 index_node->opcode() != IrOpcode::kInt64Constant &&
1285 index_node->opcode() != IrOpcode::kFloat32Constant &&
1286 index_node->opcode() != IrOpcode::kFloat64Constant);
1287 ElementAccess access = OpParameter<ElementAccess>(node);
1288 if (index.HasValue()) {
1289 int offset = index.Value() + access.header_size / kPointerSize;
1290 if (VirtualObject* object = ResolveVirtualObject(state, from)) {
1291 CHECK_GE(ElementSizeLog2Of(access.machine_type.representation()),
1292 kPointerSizeLog2);
1293 CHECK_EQ(access.header_size % kPointerSize, 0);
1294
1295 if (!object->IsTracked()) return;
1296 Node* value = object->GetField(offset);
1297 if (value) {
1298 value = ResolveReplacement(value);
1299 }
1300 // Record that the load has this alias.
1301 UpdateReplacement(state, node, value);
1302 } else if (from->opcode() == IrOpcode::kPhi) {
1303 ElementAccess access = OpParameter<ElementAccess>(node);
1304 int offset = index.Value() + access.header_size / kPointerSize;
1305 ProcessLoadFromPhi(offset, from, node, state);
1306 }
1307 } else {
1308 // We have a load from a non-const index, cannot eliminate object.
1309 if (SetEscaped(from)) {
1310 if (FLAG_trace_turbo_escape) {
1311 PrintF(
1312 "Setting #%d (%s) to escaped because store element #%d to "
1313 "non-const "
1314 "index #%d (%s)\n",
1315 from->id(), from->op()->mnemonic(), node->id(), index_node->id(),
1316 index_node->op()->mnemonic());
1317 }
1318 }
1319 }
1320}
1321
1322
1323void EscapeAnalysis::ProcessStoreField(Node* node) {
1324 DCHECK_EQ(node->opcode(), IrOpcode::kStoreField);
1325 ForwardVirtualState(node);
1326 Node* to = NodeProperties::GetValueInput(node, 0);
1327 Node* val = NodeProperties::GetValueInput(node, 1);
1328 VirtualState* state = virtual_states_[node->id()];
1329 if (VirtualObject* obj = ResolveVirtualObject(state, to)) {
1330 if (!obj->IsTracked()) return;
1331 int offset = OffsetFromAccess(node);
1332 if (obj->SetField(offset, ResolveReplacement(val))) {
1333 state->LastChangedAt(node);
1334 }
1335 }
1336}
1337
1338
1339void EscapeAnalysis::ProcessStoreElement(Node* node) {
1340 DCHECK_EQ(node->opcode(), IrOpcode::kStoreElement);
1341 ForwardVirtualState(node);
1342 Node* to = NodeProperties::GetValueInput(node, 0);
1343 Node* index_node = node->InputAt(1);
1344 NumberMatcher index(index_node);
1345 DCHECK(index_node->opcode() != IrOpcode::kInt32Constant &&
1346 index_node->opcode() != IrOpcode::kInt64Constant &&
1347 index_node->opcode() != IrOpcode::kFloat32Constant &&
1348 index_node->opcode() != IrOpcode::kFloat64Constant);
1349 ElementAccess access = OpParameter<ElementAccess>(node);
1350 Node* val = NodeProperties::GetValueInput(node, 2);
1351 if (index.HasValue()) {
1352 int offset = index.Value() + access.header_size / kPointerSize;
1353 VirtualState* states = virtual_states_[node->id()];
1354 if (VirtualObject* obj = ResolveVirtualObject(states, to)) {
1355 if (!obj->IsTracked()) return;
1356 CHECK_GE(ElementSizeLog2Of(access.machine_type.representation()),
1357 kPointerSizeLog2);
1358 CHECK_EQ(access.header_size % kPointerSize, 0);
1359 if (obj->SetField(offset, ResolveReplacement(val))) {
1360 states->LastChangedAt(node);
1361 }
1362 }
1363 } else {
1364 // We have a store to a non-const index, cannot eliminate object.
1365 if (SetEscaped(to)) {
1366 if (FLAG_trace_turbo_escape) {
1367 PrintF(
1368 "Setting #%d (%s) to escaped because store element #%d to "
1369 "non-const "
1370 "index #%d (%s)\n",
1371 to->id(), to->op()->mnemonic(), node->id(), index_node->id(),
1372 index_node->op()->mnemonic());
1373 }
1374 }
1375 }
1376}
1377
1378
// Returns (creating on demand) the ObjectState node that materializes the
// virtual object behind |node| at effect position |effect|; used when a
// virtualized allocation must be described for deoptimization.  Returns
// nullptr if |node| is not a virtual Allocate/FinishRegion or has no
// resolvable virtual object.
Node* EscapeAnalysis::GetOrCreateObjectState(Node* effect, Node* node) {
  if ((node->opcode() == IrOpcode::kFinishRegion ||
       node->opcode() == IrOpcode::kAllocate) &&
      IsVirtual(node)) {
    if (VirtualObject* vobj =
            ResolveVirtualObject(virtual_states_[effect->id()], node)) {
      if (Node* object_state = vobj->GetObjectState()) {
        // Already materialized once; reuse the cached ObjectState node.
        return object_state;
      } else {
        // Gather the present field values as inputs for the ObjectState.
        // NOTE(review): fields that are nullptr are silently skipped, so
        // input indices may not line up with field indices — presumably
        // intended; verify against the deopt consumer.
        cache_->fields().clear();
        for (size_t i = 0; i < vobj->field_count(); ++i) {
          if (Node* field = vobj->GetField(i)) {
            cache_->fields().push_back(field);
          }
        }
        int input_count = static_cast<int>(cache_->fields().size());
        Node* new_object_state =
            graph()->NewNode(common()->ObjectState(input_count, vobj->id()),
                             input_count, &cache_->fields().front());
        // Cache before recursing so cyclic references terminate.
        vobj->SetObjectState(new_object_state);
        if (FLAG_trace_turbo_escape) {
          PrintF(
              "Creating object state #%d for vobj %p (from node #%d) at effect "
              "#%d\n",
              new_object_state->id(), static_cast<void*>(vobj), node->id(),
              effect->id());
        }
        // Now fix uses of other objects.
        for (size_t i = 0; i < vobj->field_count(); ++i) {
          if (Node* field = vobj->GetField(i)) {
            // Recursively replace virtual-object fields by their own
            // ObjectState nodes.
            if (Node* field_object_state =
                    GetOrCreateObjectState(effect, field)) {
              NodeProperties::ReplaceValueInput(
                  new_object_state, field_object_state, static_cast<int>(i));
            }
          }
        }
        return new_object_state;
      }
    }
  }
  return nullptr;
}
1422
1423
1424void EscapeAnalysis::DebugPrintObject(VirtualObject* object, Alias alias) {
1425 PrintF(" Alias @%d: Object #%d with %zu fields\n", alias, object->id(),
1426 object->field_count());
1427 for (size_t i = 0; i < object->field_count(); ++i) {
1428 if (Node* f = object->GetField(i)) {
1429 PrintF(" Field %zu = #%d (%s)\n", i, f->id(), f->op()->mnemonic());
1430 }
1431 }
1432}
1433
1434
1435void EscapeAnalysis::DebugPrintState(VirtualState* state) {
1436 PrintF("Dumping object state %p\n", static_cast<void*>(state));
1437 for (Alias alias = 0; alias < AliasCount(); ++alias) {
1438 if (VirtualObject* object = state->VirtualObjectFromAlias(alias)) {
1439 DebugPrintObject(object, alias);
1440 }
1441 }
1442}
1443
1444
1445void EscapeAnalysis::DebugPrint() {
1446 ZoneVector<VirtualState*> object_states(zone());
1447 for (NodeId id = 0; id < virtual_states_.size(); id++) {
1448 if (VirtualState* states = virtual_states_[id]) {
1449 if (std::find(object_states.begin(), object_states.end(), states) ==
1450 object_states.end()) {
1451 object_states.push_back(states);
1452 }
1453 }
1454 }
1455 for (size_t n = 0; n < object_states.size(); n++) {
1456 DebugPrintState(object_states[n]);
1457 }
1458}
1459
1460
1461VirtualObject* EscapeAnalysis::GetVirtualObject(VirtualState* state,
1462 Node* node) {
1463 if (node->id() >= aliases_.size()) return nullptr;
1464 Alias alias = aliases_[node->id()];
1465 if (alias >= state->size()) return nullptr;
1466 return state->VirtualObjectFromAlias(alias);
1467}
1468
1469} // namespace compiler
1470} // namespace internal
1471} // namespace v8