Revert "Revert "Upgrade to 5.0.71.48"" DO NOT MERGE
This reverts commit f2e3994fa5148cc3d9946666f0b0596290192b0e,
and updates the x64 makefile properly so it doesn't break that
build.
FPIIM-449
Change-Id: Ib83e35bfbae6af627451c926a9650ec57c045605
(cherry picked from commit 109988c7ccb6f3fd1a58574fa3dfb88beaef6632)
diff --git a/src/compiler/escape-analysis.cc b/src/compiler/escape-analysis.cc
index af0ba6a..b1a12b2 100644
--- a/src/compiler/escape-analysis.cc
+++ b/src/compiler/escape-analysis.cc
@@ -24,106 +24,134 @@
namespace internal {
namespace compiler {
-const EscapeAnalysis::Alias EscapeAnalysis::kNotReachable =
- std::numeric_limits<Alias>::max();
-const EscapeAnalysis::Alias EscapeAnalysis::kUntrackable =
- std::numeric_limits<Alias>::max() - 1;
+using Alias = EscapeStatusAnalysis::Alias;
+#ifdef DEBUG
+#define TRACE(...) \
+ do { \
+ if (FLAG_trace_turbo_escape) PrintF(__VA_ARGS__); \
+ } while (false)
+#else
+#define TRACE(...)
+#endif
+
+const Alias EscapeStatusAnalysis::kNotReachable =
+ std::numeric_limits<Alias>::max();
+const Alias EscapeStatusAnalysis::kUntrackable =
+ std::numeric_limits<Alias>::max() - 1;
class VirtualObject : public ZoneObject {
public:
- enum Status { kUntracked = 0, kTracked = 1 };
- VirtualObject(NodeId id, Zone* zone)
+ enum Status {
+ kInitial = 0,
+ kTracked = 1u << 0,
+ kInitialized = 1u << 1,
+ kCopyRequired = 1u << 2,
+ };
+ typedef base::Flags<Status, unsigned char> StatusFlags;
+
+ VirtualObject(NodeId id, VirtualState* owner, Zone* zone)
: id_(id),
- status_(kUntracked),
+ status_(kInitial),
fields_(zone),
phi_(zone),
- object_state_(nullptr) {}
+ object_state_(nullptr),
+ owner_(owner) {}
- VirtualObject(const VirtualObject& other)
+ VirtualObject(VirtualState* owner, const VirtualObject& other)
: id_(other.id_),
- status_(other.status_),
+ status_(other.status_ & ~kCopyRequired),
fields_(other.fields_),
phi_(other.phi_),
- object_state_(other.object_state_) {}
+ object_state_(other.object_state_),
+ owner_(owner) {}
- VirtualObject(NodeId id, Zone* zone, size_t field_number)
+ VirtualObject(NodeId id, VirtualState* owner, Zone* zone, size_t field_number,
+ bool initialized)
: id_(id),
- status_(kTracked),
+ status_(kTracked | (initialized ? kInitialized : kInitial)),
fields_(zone),
phi_(zone),
- object_state_(nullptr) {
+ object_state_(nullptr),
+ owner_(owner) {
fields_.resize(field_number);
phi_.resize(field_number, false);
}
- Node* GetField(size_t offset) {
- if (offset < fields_.size()) {
- return fields_[offset];
- }
- return nullptr;
- }
+ Node* GetField(size_t offset) { return fields_[offset]; }
- bool IsCreatedPhi(size_t offset) {
- if (offset < phi_.size()) {
- return phi_[offset];
- }
- return false;
- }
+ bool IsCreatedPhi(size_t offset) { return phi_[offset]; }
- bool SetField(size_t offset, Node* node, bool created_phi = false) {
- bool changed = fields_[offset] != node || phi_[offset] != created_phi;
+ void SetField(size_t offset, Node* node, bool created_phi = false) {
fields_[offset] = node;
phi_[offset] = created_phi;
- if (changed && FLAG_trace_turbo_escape && node) {
- PrintF("Setting field %zu of #%d to #%d (%s)\n", offset, id(), node->id(),
- node->op()->mnemonic());
- }
- return changed;
}
- bool IsVirtual() const { return status_ == kTracked; }
- bool IsTracked() const { return status_ != kUntracked; }
+ bool IsTracked() const { return status_ & kTracked; }
+ bool IsInitialized() const { return status_ & kInitialized; }
+ bool SetInitialized() { return status_ |= kInitialized; }
+ VirtualState* owner() const { return owner_; }
Node** fields_array() { return &fields_.front(); }
size_t field_count() { return fields_.size(); }
bool ResizeFields(size_t field_count) {
- if (field_count != fields_.size()) {
+ if (field_count > fields_.size()) {
fields_.resize(field_count);
phi_.resize(field_count);
return true;
}
return false;
}
- bool ClearAllFields() {
- bool changed = false;
+ void ClearAllFields() {
for (size_t i = 0; i < fields_.size(); ++i) {
- if (fields_[i] != nullptr) {
- fields_[i] = nullptr;
- changed = true;
- }
+ fields_[i] = nullptr;
phi_[i] = false;
}
- return changed;
+ }
+ bool AllFieldsClear() {
+ for (size_t i = 0; i < fields_.size(); ++i) {
+ if (fields_[i] != nullptr) {
+ return false;
+ }
+ }
+ return true;
}
bool UpdateFrom(const VirtualObject& other);
+ bool MergeFrom(MergeCache* cache, Node* at, Graph* graph,
+ CommonOperatorBuilder* common);
void SetObjectState(Node* node) { object_state_ = node; }
Node* GetObjectState() const { return object_state_; }
+ bool IsCopyRequired() const { return status_ & kCopyRequired; }
+ void SetCopyRequired() { status_ |= kCopyRequired; }
+ bool NeedCopyForModification() {
+ if (!IsCopyRequired() || !IsInitialized()) {
+ return false;
+ }
+ return true;
+ }
NodeId id() const { return id_; }
void id(NodeId id) { id_ = id; }
private:
+ bool MergeFields(size_t i, Node* at, MergeCache* cache, Graph* graph,
+ CommonOperatorBuilder* common);
+
NodeId id_;
- Status status_;
+ StatusFlags status_;
ZoneVector<Node*> fields_;
ZoneVector<bool> phi_;
Node* object_state_;
+ VirtualState* owner_;
+
+ DISALLOW_COPY_AND_ASSIGN(VirtualObject);
};
+DEFINE_OPERATORS_FOR_FLAGS(VirtualObject::StatusFlags)
bool VirtualObject::UpdateFrom(const VirtualObject& other) {
bool changed = status_ != other.status_;
status_ = other.status_;
+ phi_ = other.phi_;
if (fields_.size() != other.fields_.size()) {
fields_ = other.fields_;
return true;
@@ -137,36 +165,49 @@
return changed;
}
-
class VirtualState : public ZoneObject {
public:
- VirtualState(Zone* zone, size_t size);
- VirtualState(const VirtualState& states);
+ VirtualState(Node* owner, Zone* zone, size_t size)
+ : info_(size, nullptr, zone), owner_(owner) {}
+
+ VirtualState(Node* owner, const VirtualState& state)
+ : info_(state.info_.size(), nullptr, state.info_.get_allocator().zone()),
+ owner_(owner) {
+ for (size_t i = 0; i < info_.size(); ++i) {
+ if (state.info_[i]) {
+ info_[i] = state.info_[i];
+ }
+ }
+ }
VirtualObject* VirtualObjectFromAlias(size_t alias);
- VirtualObject* GetOrCreateTrackedVirtualObject(EscapeAnalysis::Alias alias,
- NodeId id, Zone* zone);
- void SetVirtualObject(EscapeAnalysis::Alias alias, VirtualObject* state);
- void LastChangedAt(Node* node) { last_changed_ = node; }
- Node* GetLastChanged() { return last_changed_; }
+ void SetVirtualObject(Alias alias, VirtualObject* state);
bool UpdateFrom(VirtualState* state, Zone* zone);
bool MergeFrom(MergeCache* cache, Zone* zone, Graph* graph,
- CommonOperatorBuilder* common, Node* control);
+ CommonOperatorBuilder* common, Node* at);
size_t size() const { return info_.size(); }
+ Node* owner() const { return owner_; }
+ VirtualObject* Copy(VirtualObject* obj, Alias alias);
+ void SetCopyRequired() {
+ for (VirtualObject* obj : info_) {
+ if (obj) obj->SetCopyRequired();
+ }
+ }
private:
ZoneVector<VirtualObject*> info_;
- Node* last_changed_;
-};
+ Node* owner_;
+ DISALLOW_COPY_AND_ASSIGN(VirtualState);
+};
class MergeCache : public ZoneObject {
public:
explicit MergeCache(Zone* zone)
: states_(zone), objects_(zone), fields_(zone) {
- states_.reserve(4);
- objects_.reserve(4);
- fields_.reserve(4);
+ states_.reserve(5);
+ objects_.reserve(5);
+ fields_.reserve(5);
}
ZoneVector<VirtualState*>& states() { return states_; }
ZoneVector<VirtualObject*>& objects() { return objects_; }
@@ -176,20 +217,20 @@
objects_.clear();
fields_.clear();
}
- size_t LoadVirtualObjectsFromStatesFor(EscapeAnalysis::Alias alias);
- void LoadVirtualObjectsForFieldsFrom(
- VirtualState* state, const ZoneVector<EscapeAnalysis::Alias>& aliases);
+ size_t LoadVirtualObjectsFromStatesFor(Alias alias);
+ void LoadVirtualObjectsForFieldsFrom(VirtualState* state,
+ const ZoneVector<Alias>& aliases);
Node* GetFields(size_t pos);
private:
ZoneVector<VirtualState*> states_;
ZoneVector<VirtualObject*> objects_;
ZoneVector<Node*> fields_;
+
+ DISALLOW_COPY_AND_ASSIGN(MergeCache);
};
-
-size_t MergeCache::LoadVirtualObjectsFromStatesFor(
- EscapeAnalysis::Alias alias) {
+size_t MergeCache::LoadVirtualObjectsFromStatesFor(Alias alias) {
objects_.clear();
DCHECK_GT(states_.size(), 0u);
size_t min = std::numeric_limits<size_t>::max();
@@ -202,13 +243,12 @@
return min;
}
-
void MergeCache::LoadVirtualObjectsForFieldsFrom(
- VirtualState* state, const ZoneVector<EscapeAnalysis::Alias>& aliases) {
+ VirtualState* state, const ZoneVector<Alias>& aliases) {
objects_.clear();
size_t max_alias = state->size();
for (Node* field : fields_) {
- EscapeAnalysis::Alias alias = aliases[field->id()];
+ Alias alias = aliases[field->id()];
if (alias >= max_alias) continue;
if (VirtualObject* obj = state->VirtualObjectFromAlias(alias)) {
objects_.push_back(obj);
@@ -216,11 +256,13 @@
}
}
-
Node* MergeCache::GetFields(size_t pos) {
fields_.clear();
- Node* rep = objects_.front()->GetField(pos);
+ Node* rep = pos >= objects_.front()->field_count()
+ ? nullptr
+ : objects_.front()->GetField(pos);
for (VirtualObject* obj : objects_) {
+ if (pos >= obj->field_count()) continue;
Node* field = obj->GetField(pos);
if (field) {
fields_.push_back(field);
@@ -232,72 +274,48 @@
return rep;
}
-
-VirtualState::VirtualState(Zone* zone, size_t size)
- : info_(size, nullptr, zone), last_changed_(nullptr) {}
-
-
-VirtualState::VirtualState(const VirtualState& state)
- : info_(state.info_.size(), nullptr, state.info_.get_allocator().zone()),
- last_changed_(state.last_changed_) {
- for (size_t i = 0; i < state.info_.size(); ++i) {
- if (state.info_[i]) {
- info_[i] =
- new (info_.get_allocator().zone()) VirtualObject(*state.info_[i]);
- }
- }
+VirtualObject* VirtualState::Copy(VirtualObject* obj, Alias alias) {
+ if (obj->owner() == this) return obj;
+ VirtualObject* new_obj =
+ new (info_.get_allocator().zone()) VirtualObject(this, *obj);
+ TRACE("At state %p, alias @%d (#%d), copying virtual object from %p to %p\n",
+ static_cast<void*>(this), alias, obj->id(), static_cast<void*>(obj),
+ static_cast<void*>(new_obj));
+ info_[alias] = new_obj;
+ return new_obj;
}
-
VirtualObject* VirtualState::VirtualObjectFromAlias(size_t alias) {
return info_[alias];
}
-
-VirtualObject* VirtualState::GetOrCreateTrackedVirtualObject(
- EscapeAnalysis::Alias alias, NodeId id, Zone* zone) {
- if (VirtualObject* obj = VirtualObjectFromAlias(alias)) {
- return obj;
- }
- VirtualObject* obj = new (zone) VirtualObject(id, zone, 0);
- SetVirtualObject(alias, obj);
- return obj;
-}
-
-
-void VirtualState::SetVirtualObject(EscapeAnalysis::Alias alias,
- VirtualObject* obj) {
+void VirtualState::SetVirtualObject(Alias alias, VirtualObject* obj) {
info_[alias] = obj;
}
-
bool VirtualState::UpdateFrom(VirtualState* from, Zone* zone) {
+ if (from == this) return false;
bool changed = false;
- for (EscapeAnalysis::Alias alias = 0; alias < size(); ++alias) {
+ for (Alias alias = 0; alias < size(); ++alias) {
VirtualObject* ls = VirtualObjectFromAlias(alias);
VirtualObject* rs = from->VirtualObjectFromAlias(alias);
- if (rs == nullptr) {
- continue;
- }
+ if (ls == rs || rs == nullptr) continue;
if (ls == nullptr) {
- ls = new (zone) VirtualObject(*rs);
+ ls = new (zone) VirtualObject(this, *rs);
SetVirtualObject(alias, ls);
changed = true;
continue;
}
- if (FLAG_trace_turbo_escape) {
- PrintF(" Updating fields of @%d\n", alias);
- }
+ TRACE(" Updating fields of @%d\n", alias);
changed = ls->UpdateFrom(*rs) || changed;
}
return false;
}
-
namespace {
bool IsEquivalentPhi(Node* node1, Node* node2) {
@@ -316,7 +334,6 @@
return true;
}
-
bool IsEquivalentPhi(Node* phi, ZoneVector<Node*>& inputs) {
if (phi->opcode() != IrOpcode::kPhi) return false;
if (phi->op()->ValueInputCount() != inputs.size()) {
@@ -333,186 +350,225 @@
} // namespace
-
-Node* EscapeAnalysis::GetReplacementIfSame(ZoneVector<VirtualObject*>& objs) {
- Node* rep = GetReplacement(objs.front()->id());
- for (VirtualObject* obj : objs) {
- if (GetReplacement(obj->id()) != rep) {
- return nullptr;
+bool VirtualObject::MergeFields(size_t i, Node* at, MergeCache* cache,
+ Graph* graph, CommonOperatorBuilder* common) {
+ bool changed = false;
+ int value_input_count = static_cast<int>(cache->fields().size());
+ Node* rep = GetField(i);
+ if (!rep || !IsCreatedPhi(i)) {
+ Node* control = NodeProperties::GetControlInput(at);
+ cache->fields().push_back(control);
+ Node* phi = graph->NewNode(
+ common->Phi(MachineRepresentation::kTagged, value_input_count),
+ value_input_count + 1, &cache->fields().front());
+ SetField(i, phi, true);
+#ifdef DEBUG
+ if (FLAG_trace_turbo_escape) {
+ PrintF(" Creating Phi #%d as merge of", phi->id());
+ for (int i = 0; i < value_input_count; i++) {
+ PrintF(" #%d (%s)", cache->fields()[i]->id(),
+ cache->fields()[i]->op()->mnemonic());
+ }
+ PrintF("\n");
+ }
+#endif
+ changed = true;
+ } else {
+ DCHECK(rep->opcode() == IrOpcode::kPhi);
+ for (int n = 0; n < value_input_count; ++n) {
+ Node* old = NodeProperties::GetValueInput(rep, n);
+ if (old != cache->fields()[n]) {
+ changed = true;
+ NodeProperties::ReplaceValueInput(rep, cache->fields()[n], n);
+ }
}
}
- return rep;
+ return changed;
}
+bool VirtualObject::MergeFrom(MergeCache* cache, Node* at, Graph* graph,
+ CommonOperatorBuilder* common) {
+ DCHECK(at->opcode() == IrOpcode::kEffectPhi ||
+ at->opcode() == IrOpcode::kPhi);
+ bool changed = false;
+ for (size_t i = 0; i < field_count(); ++i) {
+ if (Node* field = cache->GetFields(i)) {
+ changed = changed || GetField(i) != field;
+ SetField(i, field);
+ TRACE(" Field %zu agree on rep #%d\n", i, field->id());
+ } else {
+ int arity = at->opcode() == IrOpcode::kEffectPhi
+ ? at->op()->EffectInputCount()
+ : at->op()->ValueInputCount();
+ if (cache->fields().size() == arity) {
+ changed = MergeFields(i, at, cache, graph, common) || changed;
+ } else {
+ if (GetField(i) != nullptr) {
+ TRACE(" Field %zu cleared\n", i);
+ changed = true;
+ }
+ SetField(i, nullptr);
+ }
+ }
+ }
+ return changed;
+}
bool VirtualState::MergeFrom(MergeCache* cache, Zone* zone, Graph* graph,
- CommonOperatorBuilder* common, Node* control) {
+ CommonOperatorBuilder* common, Node* at) {
DCHECK_GT(cache->states().size(), 0u);
bool changed = false;
- for (EscapeAnalysis::Alias alias = 0; alias < size(); ++alias) {
- size_t fields = cache->LoadVirtualObjectsFromStatesFor(alias);
- if (cache->objects().size() == cache->states().size()) {
- if (FLAG_trace_turbo_escape) {
- PrintF(" Merging virtual objects of @%d\n", alias);
- }
- VirtualObject* mergeObject = GetOrCreateTrackedVirtualObject(
- alias, cache->objects().front()->id(), zone);
- changed = mergeObject->ResizeFields(fields) || changed;
- for (size_t i = 0; i < fields; ++i) {
- if (Node* field = cache->GetFields(i)) {
- changed = mergeObject->SetField(i, field) || changed;
- if (FLAG_trace_turbo_escape) {
- PrintF(" Field %zu agree on rep #%d\n", i, field->id());
- }
- } else {
- int value_input_count = static_cast<int>(cache->fields().size());
- if (cache->fields().size() == cache->objects().size()) {
- Node* rep = mergeObject->GetField(i);
- if (!rep || !mergeObject->IsCreatedPhi(i)) {
- cache->fields().push_back(control);
- Node* phi = graph->NewNode(
- common->Phi(MachineRepresentation::kTagged,
- value_input_count),
- value_input_count + 1, &cache->fields().front());
- mergeObject->SetField(i, phi, true);
- if (FLAG_trace_turbo_escape) {
- PrintF(" Creating Phi #%d as merge of", phi->id());
- for (int i = 0; i < value_input_count; i++) {
- PrintF(" #%d (%s)", cache->fields()[i]->id(),
- cache->fields()[i]->op()->mnemonic());
- }
- PrintF("\n");
- }
- changed = true;
- } else {
- DCHECK(rep->opcode() == IrOpcode::kPhi);
- for (int n = 0; n < value_input_count; ++n) {
- if (n < rep->op()->ValueInputCount()) {
- Node* old = NodeProperties::GetValueInput(rep, n);
- if (old != cache->fields()[n]) {
- changed = true;
- NodeProperties::ReplaceValueInput(rep, cache->fields()[n],
- n);
- }
- } else {
- changed = true;
- rep->InsertInput(graph->zone(), n, cache->fields()[n]);
- }
- }
- if (rep->op()->ValueInputCount() != value_input_count) {
- if (FLAG_trace_turbo_escape) {
- PrintF(" Widening Phi #%d of arity %d to %d", rep->id(),
- rep->op()->ValueInputCount(), value_input_count);
- }
- NodeProperties::ChangeOp(
- rep, common->Phi(MachineRepresentation::kTagged,
- value_input_count));
- }
- }
- } else {
- changed = mergeObject->SetField(i, nullptr) || changed;
- }
+ for (Alias alias = 0; alias < size(); ++alias) {
+ cache->objects().clear();
+ VirtualObject* mergeObject = VirtualObjectFromAlias(alias);
+ bool copy_merge_object = false;
+ size_t fields = std::numeric_limits<size_t>::max();
+ for (VirtualState* state : cache->states()) {
+ if (VirtualObject* obj = state->VirtualObjectFromAlias(alias)) {
+ cache->objects().push_back(obj);
+ if (mergeObject == obj) {
+ copy_merge_object = true;
}
+ fields = std::min(obj->field_count(), fields);
}
+ }
+ if (cache->objects().size() == cache->states().size()) {
+ if (!mergeObject) {
+ VirtualObject* obj = new (zone)
+ VirtualObject(cache->objects().front()->id(), this, zone, fields,
+ cache->objects().front()->IsInitialized());
+ SetVirtualObject(alias, obj);
+ mergeObject = obj;
+ changed = true;
+ } else if (copy_merge_object) {
+ VirtualObject* obj = new (zone) VirtualObject(this, *mergeObject);
+ SetVirtualObject(alias, obj);
+ mergeObject = obj;
+ changed = true;
+ } else {
+ changed = mergeObject->ResizeFields(fields) || changed;
+ }
+#ifdef DEBUG
+ if (FLAG_trace_turbo_escape) {
+ PrintF(" Alias @%d, merging into %p virtual objects", alias,
+ static_cast<void*>(mergeObject));
+ for (size_t i = 0; i < cache->objects().size(); i++) {
+ PrintF(" %p", static_cast<void*>(cache->objects()[i]));
+ }
+ PrintF("\n");
+ }
+#endif // DEBUG
+ changed = mergeObject->MergeFrom(cache, at, graph, common) || changed;
} else {
+ if (mergeObject) {
+ TRACE(" Alias %d, virtual object removed\n", alias);
+ changed = true;
+ }
SetVirtualObject(alias, nullptr);
}
}
return changed;
}
-
EscapeStatusAnalysis::EscapeStatusAnalysis(EscapeAnalysis* object_analysis,
Graph* graph, Zone* zone)
- : object_analysis_(object_analysis),
+ : stack_(zone),
+ object_analysis_(object_analysis),
graph_(graph),
zone_(zone),
- status_(graph->NodeCount(), kUnknown, zone),
- queue_(zone) {}
-
+ status_(zone),
+ next_free_alias_(0),
+ status_stack_(zone),
+ aliases_(zone) {}
EscapeStatusAnalysis::~EscapeStatusAnalysis() {}
-
bool EscapeStatusAnalysis::HasEntry(Node* node) {
return status_[node->id()] & (kTracked | kEscaped);
}
-
bool EscapeStatusAnalysis::IsVirtual(Node* node) {
- return (status_[node->id()] & kTracked) && !(status_[node->id()] & kEscaped);
+ return IsVirtual(node->id());
}
+bool EscapeStatusAnalysis::IsVirtual(NodeId id) {
+ return (status_[id] & kTracked) && !(status_[id] & kEscaped);
+}
bool EscapeStatusAnalysis::IsEscaped(Node* node) {
return status_[node->id()] & kEscaped;
}
-
bool EscapeStatusAnalysis::IsAllocation(Node* node) {
return node->opcode() == IrOpcode::kAllocate ||
node->opcode() == IrOpcode::kFinishRegion;
}
-
bool EscapeStatusAnalysis::SetEscaped(Node* node) {
bool changed = !(status_[node->id()] & kEscaped);
status_[node->id()] |= kEscaped | kTracked;
return changed;
}
-
-void EscapeStatusAnalysis::Resize() {
- status_.resize(graph()->NodeCount(), kUnknown);
+bool EscapeStatusAnalysis::IsInQueue(NodeId id) {
+ return status_[id] & kInQueue;
}
-
-size_t EscapeStatusAnalysis::size() { return status_.size(); }
-
-
-void EscapeStatusAnalysis::Run() {
- Resize();
- queue_.push_back(graph()->end());
- status_[graph()->end()->id()] |= kOnStack;
- while (!queue_.empty()) {
- Node* node = queue_.front();
- queue_.pop_front();
- status_[node->id()] &= ~kOnStack;
- Process(node);
- status_[node->id()] |= kVisited;
- for (Edge edge : node->input_edges()) {
- Node* input = edge.to();
- if (!(status_[input->id()] & (kVisited | kOnStack))) {
- queue_.push_back(input);
- status_[input->id()] |= kOnStack;
- }
- }
+void EscapeStatusAnalysis::SetInQueue(NodeId id, bool on_stack) {
+ if (on_stack) {
+ status_[id] |= kInQueue;
+ } else {
+ status_[id] &= ~kInQueue;
}
}
+void EscapeStatusAnalysis::ResizeStatusVector() {
+ if (status_.size() <= graph()->NodeCount()) {
+ status_.resize(graph()->NodeCount() * 1.1, kUnknown);
+ }
+}
+
+size_t EscapeStatusAnalysis::GetStatusVectorSize() { return status_.size(); }
+
+void EscapeStatusAnalysis::RunStatusAnalysis() {
+ ResizeStatusVector();
+ while (!status_stack_.empty()) {
+ Node* node = status_stack_.back();
+ status_stack_.pop_back();
+ status_[node->id()] &= ~kOnStack;
+ Process(node);
+ status_[node->id()] |= kVisited;
+ }
+}
+
+void EscapeStatusAnalysis::EnqueueForStatusAnalysis(Node* node) {
+ DCHECK_NOT_NULL(node);
+ if (!(status_[node->id()] & kOnStack)) {
+ status_stack_.push_back(node);
+ status_[node->id()] |= kOnStack;
+ }
+}
void EscapeStatusAnalysis::RevisitInputs(Node* node) {
for (Edge edge : node->input_edges()) {
Node* input = edge.to();
if (!(status_[input->id()] & kOnStack)) {
- queue_.push_back(input);
+ status_stack_.push_back(input);
status_[input->id()] |= kOnStack;
}
}
}
-
void EscapeStatusAnalysis::RevisitUses(Node* node) {
for (Edge edge : node->use_edges()) {
Node* use = edge.from();
- if (!(status_[use->id()] & kOnStack)) {
- queue_.push_back(use);
+ if (!(status_[use->id()] & kOnStack) && !IsNotReachable(use)) {
+ status_stack_.push_back(use);
status_[use->id()] |= kOnStack;
}
}
}
-
void EscapeStatusAnalysis::Process(Node* node) {
switch (node->opcode()) {
case IrOpcode::kAllocate:
@@ -535,15 +591,17 @@
RevisitUses(rep);
}
}
+ RevisitUses(node);
break;
}
case IrOpcode::kPhi:
if (!HasEntry(node)) {
status_[node->id()] |= kTracked;
- if (!IsAllocationPhi(node)) {
- SetEscaped(node);
- RevisitUses(node);
- }
+ RevisitUses(node);
+ }
+ if (!IsAllocationPhi(node) && SetEscaped(node)) {
+ RevisitInputs(node);
+ RevisitUses(node);
}
CheckUsesForEscape(node);
default:
@@ -551,7 +609,6 @@
}
}
-
bool EscapeStatusAnalysis::IsAllocationPhi(Node* node) {
for (Edge edge : node->input_edges()) {
Node* input = edge.to();
@@ -562,7 +619,6 @@
return true;
}
-
void EscapeStatusAnalysis::ProcessStoreField(Node* node) {
DCHECK_EQ(node->opcode(), IrOpcode::kStoreField);
Node* to = NodeProperties::GetValueInput(node, 0);
@@ -570,14 +626,11 @@
if ((IsEscaped(to) || !IsAllocation(to)) && SetEscaped(val)) {
RevisitUses(val);
RevisitInputs(val);
- if (FLAG_trace_turbo_escape) {
- PrintF("Setting #%d (%s) to escaped because of store to field of #%d\n",
- val->id(), val->op()->mnemonic(), to->id());
- }
+ TRACE("Setting #%d (%s) to escaped because of store to field of #%d\n",
+ val->id(), val->op()->mnemonic(), to->id());
}
}
-
void EscapeStatusAnalysis::ProcessStoreElement(Node* node) {
DCHECK_EQ(node->opcode(), IrOpcode::kStoreElement);
Node* to = NodeProperties::GetValueInput(node, 0);
@@ -585,34 +638,27 @@
if ((IsEscaped(to) || !IsAllocation(to)) && SetEscaped(val)) {
RevisitUses(val);
RevisitInputs(val);
- if (FLAG_trace_turbo_escape) {
- PrintF("Setting #%d (%s) to escaped because of store to field of #%d\n",
- val->id(), val->op()->mnemonic(), to->id());
- }
+ TRACE("Setting #%d (%s) to escaped because of store to field of #%d\n",
+ val->id(), val->op()->mnemonic(), to->id());
}
}
-
void EscapeStatusAnalysis::ProcessAllocate(Node* node) {
DCHECK_EQ(node->opcode(), IrOpcode::kAllocate);
if (!HasEntry(node)) {
status_[node->id()] |= kTracked;
- if (FLAG_trace_turbo_escape) {
- PrintF("Created status entry for node #%d (%s)\n", node->id(),
- node->op()->mnemonic());
- }
+ TRACE("Created status entry for node #%d (%s)\n", node->id(),
+ node->op()->mnemonic());
NumberMatcher size(node->InputAt(0));
DCHECK(node->InputAt(0)->opcode() != IrOpcode::kInt32Constant &&
node->InputAt(0)->opcode() != IrOpcode::kInt64Constant &&
node->InputAt(0)->opcode() != IrOpcode::kFloat32Constant &&
node->InputAt(0)->opcode() != IrOpcode::kFloat64Constant);
+ RevisitUses(node);
if (!size.HasValue() && SetEscaped(node)) {
- RevisitUses(node);
- if (FLAG_trace_turbo_escape) {
- PrintF("Setting #%d to escaped because of non-const alloc\n",
- node->id());
- }
- // This node is known to escape, uses do not have to be checked.
+ TRACE("Setting #%d to escaped because of non-const alloc\n", node->id());
+ // This node is already known to escape, uses do not have to be checked
+ // for escape.
return;
}
}
@@ -621,24 +667,22 @@
}
}
-
bool EscapeStatusAnalysis::CheckUsesForEscape(Node* uses, Node* rep,
bool phi_escaping) {
for (Edge edge : uses->use_edges()) {
Node* use = edge.from();
+ if (IsNotReachable(use)) continue;
if (edge.index() >= use->op()->ValueInputCount() +
OperatorProperties::GetContextInputCount(use->op()))
continue;
switch (use->opcode()) {
case IrOpcode::kPhi:
if (phi_escaping && SetEscaped(rep)) {
- if (FLAG_trace_turbo_escape) {
- PrintF(
- "Setting #%d (%s) to escaped because of use by phi node "
- "#%d (%s)\n",
- rep->id(), rep->op()->mnemonic(), use->id(),
- use->op()->mnemonic());
- }
+ TRACE(
+ "Setting #%d (%s) to escaped because of use by phi node "
+ "#%d (%s)\n",
+ rep->id(), rep->op()->mnemonic(), use->id(),
+ use->op()->mnemonic());
return true;
}
// Fallthrough.
@@ -651,37 +695,41 @@
case IrOpcode::kReferenceEqual:
case IrOpcode::kFinishRegion:
if (IsEscaped(use) && SetEscaped(rep)) {
- if (FLAG_trace_turbo_escape) {
- PrintF(
- "Setting #%d (%s) to escaped because of use by escaping node "
- "#%d (%s)\n",
- rep->id(), rep->op()->mnemonic(), use->id(),
- use->op()->mnemonic());
- }
+ TRACE(
+ "Setting #%d (%s) to escaped because of use by escaping node "
+ "#%d (%s)\n",
+ rep->id(), rep->op()->mnemonic(), use->id(),
+ use->op()->mnemonic());
return true;
}
break;
case IrOpcode::kObjectIsSmi:
if (!IsAllocation(rep) && SetEscaped(rep)) {
- PrintF("Setting #%d (%s) to escaped because of use by #%d (%s)\n",
- rep->id(), rep->op()->mnemonic(), use->id(),
- use->op()->mnemonic());
+ TRACE("Setting #%d (%s) to escaped because of use by #%d (%s)\n",
+ rep->id(), rep->op()->mnemonic(), use->id(),
+ use->op()->mnemonic());
+ return true;
+ }
+ break;
+ case IrOpcode::kSelect:
+ if (SetEscaped(rep)) {
+ TRACE("Setting #%d (%s) to escaped because of use by #%d (%s)\n",
+ rep->id(), rep->op()->mnemonic(), use->id(),
+ use->op()->mnemonic());
return true;
}
break;
default:
if (use->op()->EffectInputCount() == 0 &&
uses->op()->EffectInputCount() > 0) {
- PrintF("Encountered unaccounted use by #%d (%s)\n", use->id(),
- use->op()->mnemonic());
+ TRACE("Encountered unaccounted use by #%d (%s)\n", use->id(),
+ use->op()->mnemonic());
UNREACHABLE();
}
if (SetEscaped(rep)) {
- if (FLAG_trace_turbo_escape) {
- PrintF("Setting #%d (%s) to escaped because of use by #%d (%s)\n",
- rep->id(), rep->op()->mnemonic(), use->id(),
- use->op()->mnemonic());
- }
+ TRACE("Setting #%d (%s) to escaped because of use by #%d (%s)\n",
+ rep->id(), rep->op()->mnemonic(), use->id(),
+ use->op()->mnemonic());
return true;
}
}
@@ -689,7 +737,6 @@
return false;
}
-
void EscapeStatusAnalysis::ProcessFinishRegion(Node* node) {
DCHECK_EQ(node->opcode(), IrOpcode::kFinishRegion);
if (!HasEntry(node)) {
@@ -701,7 +748,6 @@
}
}
-
void EscapeStatusAnalysis::DebugPrint() {
for (NodeId id = 0; id < status_.size(); id++) {
if (status_[id] & kTracked) {
@@ -711,58 +757,69 @@
}
}
-
EscapeAnalysis::EscapeAnalysis(Graph* graph, CommonOperatorBuilder* common,
Zone* zone)
- : graph_(graph),
+ : status_analysis_(this, graph, zone),
common_(common),
- zone_(zone),
virtual_states_(zone),
replacements_(zone),
- escape_status_(this, graph, zone),
- cache_(new (zone) MergeCache(zone)),
- aliases_(zone),
- next_free_alias_(0) {}
-
+ cache_(nullptr) {}
EscapeAnalysis::~EscapeAnalysis() {}
-
void EscapeAnalysis::Run() {
replacements_.resize(graph()->NodeCount());
- AssignAliases();
- RunObjectAnalysis();
- escape_status_.Run();
+ status_analysis_.AssignAliases();
+ if (status_analysis_.AliasCount() > 0) {
+ cache_ = new (zone()) MergeCache(zone());
+ replacements_.resize(graph()->NodeCount());
+ status_analysis_.ResizeStatusVector();
+ RunObjectAnalysis();
+ status_analysis_.RunStatusAnalysis();
+ }
}
-
-void EscapeAnalysis::AssignAliases() {
- ZoneVector<Node*> stack(zone());
- stack.push_back(graph()->end());
+void EscapeStatusAnalysis::AssignAliases() {
+ size_t max_size = 1024;
+ size_t min_size = 32;
+ size_t stack_size =
+ std::min(std::max(graph()->NodeCount() / 5, min_size), max_size);
+ stack_.reserve(stack_size);
+ ResizeStatusVector();
+ stack_.push_back(graph()->end());
CHECK_LT(graph()->NodeCount(), kUntrackable);
aliases_.resize(graph()->NodeCount(), kNotReachable);
aliases_[graph()->end()->id()] = kUntrackable;
- while (!stack.empty()) {
- Node* node = stack.back();
- stack.pop_back();
+ status_stack_.reserve(8);
+ TRACE("Discovering trackable nodes");
+ while (!stack_.empty()) {
+ Node* node = stack_.back();
+ stack_.pop_back();
switch (node->opcode()) {
case IrOpcode::kAllocate:
if (aliases_[node->id()] >= kUntrackable) {
aliases_[node->id()] = NextAlias();
+ TRACE(" @%d:%s#%u", aliases_[node->id()], node->op()->mnemonic(),
+ node->id());
+ EnqueueForStatusAnalysis(node);
}
break;
case IrOpcode::kFinishRegion: {
Node* allocate = NodeProperties::GetValueInput(node, 0);
+ DCHECK_NOT_NULL(allocate);
if (allocate->opcode() == IrOpcode::kAllocate) {
if (aliases_[allocate->id()] >= kUntrackable) {
if (aliases_[allocate->id()] == kNotReachable) {
- stack.push_back(allocate);
+ stack_.push_back(allocate);
}
aliases_[allocate->id()] = NextAlias();
+ TRACE(" @%d:%s#%u", aliases_[allocate->id()],
+ allocate->op()->mnemonic(), allocate->id());
+ EnqueueForStatusAnalysis(allocate);
}
aliases_[node->id()] = aliases_[allocate->id()];
- } else {
- aliases_[node->id()] = NextAlias();
+ TRACE(" @%d:%s#%u", aliases_[node->id()], node->op()->mnemonic(),
+ node->id());
}
break;
}
@@ -773,81 +830,119 @@
for (Edge edge : node->input_edges()) {
Node* input = edge.to();
if (aliases_[input->id()] == kNotReachable) {
- stack.push_back(input);
+ stack_.push_back(input);
aliases_[input->id()] = kUntrackable;
}
}
}
-
- if (FLAG_trace_turbo_escape) {
- PrintF("Discovered trackable nodes");
- for (EscapeAnalysis::Alias id = 0; id < graph()->NodeCount(); ++id) {
- if (aliases_[id] < kUntrackable) {
- if (FLAG_trace_turbo_escape) {
- PrintF(" #%u", id);
- }
- }
- }
- PrintF("\n");
- }
+ TRACE("\n");
}
+bool EscapeStatusAnalysis::IsNotReachable(Node* node) {
+ if (node->id() >= aliases_.size()) {
+ return false;
+ }
+ return aliases_[node->id()] == kNotReachable;
+}
void EscapeAnalysis::RunObjectAnalysis() {
virtual_states_.resize(graph()->NodeCount());
- ZoneVector<Node*> stack(zone());
- stack.push_back(graph()->start());
- while (!stack.empty()) {
- Node* node = stack.back();
- stack.pop_back();
- if (aliases_[node->id()] != kNotReachable && Process(node)) {
+ ZoneDeque<Node*> queue(zone());
+ queue.push_back(graph()->start());
+ ZoneVector<Node*> danglers(zone());
+ while (!queue.empty()) {
+ Node* node = queue.back();
+ queue.pop_back();
+ status_analysis_.SetInQueue(node->id(), false);
+ if (Process(node)) {
for (Edge edge : node->use_edges()) {
+ Node* use = edge.from();
+ if (IsNotReachable(use)) {
+ continue;
+ }
if (NodeProperties::IsEffectEdge(edge)) {
- Node* use = edge.from();
- if ((use->opcode() != IrOpcode::kLoadField &&
- use->opcode() != IrOpcode::kLoadElement) ||
- !IsDanglingEffectNode(use)) {
- stack.push_back(use);
+ // Iteration order: depth first, but delay phis.
+          // We need DFS to avoid some duplication of VirtualStates and
+ // VirtualObjects, and we want to delay phis to improve performance.
+ if (use->opcode() == IrOpcode::kEffectPhi) {
+ if (!status_analysis_.IsInQueue(use->id())) {
+ queue.push_front(use);
+ }
+ } else if ((use->opcode() != IrOpcode::kLoadField &&
+ use->opcode() != IrOpcode::kLoadElement) ||
+ !IsDanglingEffectNode(use)) {
+ if (!status_analysis_.IsInQueue(use->id())) {
+ status_analysis_.SetInQueue(use->id(), true);
+ queue.push_back(use);
+ }
+ } else {
+ danglers.push_back(use);
}
}
}
- // First process loads: dangling loads are a problem otherwise.
- for (Edge edge : node->use_edges()) {
- if (NodeProperties::IsEffectEdge(edge)) {
- Node* use = edge.from();
- if ((use->opcode() == IrOpcode::kLoadField ||
- use->opcode() == IrOpcode::kLoadElement) &&
- IsDanglingEffectNode(use)) {
- stack.push_back(use);
- }
- }
- }
+ // Danglers need to be processed immediately, even if they are
+ // on the stack. Since they do not have effect outputs,
+ // we don't have to track whether they are on the stack.
+ queue.insert(queue.end(), danglers.begin(), danglers.end());
+ danglers.clear();
}
}
+#ifdef DEBUG
if (FLAG_trace_turbo_escape) {
DebugPrint();
}
+#endif
}
-
-bool EscapeAnalysis::IsDanglingEffectNode(Node* node) {
- if (node->op()->EffectInputCount() == 0) return false;
- if (node->op()->EffectOutputCount() == 0) return false;
- if (node->op()->EffectInputCount() == 1 &&
- NodeProperties::GetEffectInput(node)->opcode() == IrOpcode::kStart) {
+bool EscapeStatusAnalysis::IsDanglingEffectNode(Node* node) {
+ if (status_[node->id()] & kDanglingComputed) {
+ return status_[node->id()] & kDangling;
+ }
+ if (node->op()->EffectInputCount() == 0 ||
+ node->op()->EffectOutputCount() == 0 ||
+ (node->op()->EffectInputCount() == 1 &&
+ NodeProperties::GetEffectInput(node)->opcode() == IrOpcode::kStart)) {
// The start node is used as sentinel for nodes that are in general
// effectful, but of which an analysis has determined that they do not
// produce effects in this instance. We don't consider these nodes dangling.
+ status_[node->id()] |= kDanglingComputed;
return false;
}
for (Edge edge : node->use_edges()) {
+ Node* use = edge.from();
+ if (aliases_[use->id()] == kNotReachable) continue;
if (NodeProperties::IsEffectEdge(edge)) {
+ status_[node->id()] |= kDanglingComputed;
return false;
}
}
+ status_[node->id()] |= kDanglingComputed | kDangling;
return true;
}
+bool EscapeStatusAnalysis::IsEffectBranchPoint(Node* node) {
+ if (status_[node->id()] & kBranchPointComputed) {
+ return status_[node->id()] & kBranchPoint;
+ }
+ int count = 0;
+ for (Edge edge : node->use_edges()) {
+ Node* use = edge.from();
+ if (aliases_[use->id()] == kNotReachable) continue;
+ if (NodeProperties::IsEffectEdge(edge)) {
+ if ((use->opcode() == IrOpcode::kLoadField ||
+ use->opcode() == IrOpcode::kLoadElement ||
+ use->opcode() == IrOpcode::kLoad) &&
+ IsDanglingEffectNode(use))
+ continue;
+ if (++count > 1) {
+ status_[node->id()] |= kBranchPointComputed | kBranchPoint;
+ return true;
+ }
+ }
+ }
+ status_[node->id()] |= kBranchPointComputed;
+ return false;
+}
bool EscapeAnalysis::Process(Node* node) {
switch (node->opcode()) {
@@ -888,12 +983,12 @@
return true;
}
-
void EscapeAnalysis::ProcessAllocationUsers(Node* node) {
for (Edge edge : node->input_edges()) {
Node* input = edge.to();
- if (!NodeProperties::IsValueEdge(edge) &&
- !NodeProperties::IsContextEdge(edge))
+ Node* use = edge.from();
+ if (edge.index() >= use->op()->ValueInputCount() +
+ OperatorProperties::GetContextInputCount(use->op()))
continue;
switch (node->opcode()) {
case IrOpcode::kStoreField:
@@ -904,13 +999,17 @@
case IrOpcode::kStateValues:
case IrOpcode::kReferenceEqual:
case IrOpcode::kFinishRegion:
- case IrOpcode::kPhi:
+ case IrOpcode::kObjectIsSmi:
break;
default:
VirtualState* state = virtual_states_[node->id()];
- if (VirtualObject* obj = ResolveVirtualObject(state, input)) {
- if (obj->ClearAllFields()) {
- state->LastChangedAt(node);
+ if (VirtualObject* obj =
+ GetVirtualObject(state, ResolveReplacement(input))) {
+ if (!obj->AllFieldsClear()) {
+ obj = CopyForModificationAt(obj, state, node);
+ obj->ClearAllFields();
+ TRACE("Cleared all fields of @%d:#%d\n", GetAlias(obj->id()),
+ obj->id());
}
}
break;
@@ -918,22 +1017,32 @@
}
}
-
-bool EscapeAnalysis::IsEffectBranchPoint(Node* node) {
- int count = 0;
- for (Edge edge : node->use_edges()) {
- if (NodeProperties::IsEffectEdge(edge)) {
- if (++count > 1) {
- return true;
- }
- }
+VirtualState* EscapeAnalysis::CopyForModificationAt(VirtualState* state,
+ Node* node) {
+ if (state->owner() != node) {
+ VirtualState* new_state = new (zone()) VirtualState(node, *state);
+ virtual_states_[node->id()] = new_state;
+ TRACE("Copying virtual state %p to new state %p at node %s#%d\n",
+ static_cast<void*>(state), static_cast<void*>(new_state),
+ node->op()->mnemonic(), node->id());
+ return new_state;
}
- return false;
+ return state;
}
+VirtualObject* EscapeAnalysis::CopyForModificationAt(VirtualObject* obj,
+ VirtualState* state,
+ Node* node) {
+ if (obj->NeedCopyForModification()) {
+ state = CopyForModificationAt(state, node);
+ return state->Copy(obj, GetAlias(obj->id()));
+ }
+ return obj;
+}
void EscapeAnalysis::ForwardVirtualState(Node* node) {
DCHECK_EQ(node->op()->EffectInputCount(), 1);
+#ifdef DEBUG
if (node->opcode() != IrOpcode::kLoadField &&
node->opcode() != IrOpcode::kLoadElement &&
node->opcode() != IrOpcode::kLoad && IsDanglingEffectNode(node)) {
@@ -941,189 +1050,154 @@
node->op()->mnemonic());
UNREACHABLE();
}
+#endif // DEBUG
Node* effect = NodeProperties::GetEffectInput(node);
- // Break the cycle for effect phis.
- if (effect->opcode() == IrOpcode::kEffectPhi) {
- if (virtual_states_[effect->id()] == nullptr) {
- virtual_states_[effect->id()] =
- new (zone()) VirtualState(zone(), AliasCount());
- }
- }
DCHECK_NOT_NULL(virtual_states_[effect->id()]);
- if (IsEffectBranchPoint(effect)) {
- if (FLAG_trace_turbo_escape) {
- PrintF("Copying object state %p from #%d (%s) to #%d (%s)\n",
- static_cast<void*>(virtual_states_[effect->id()]), effect->id(),
- effect->op()->mnemonic(), node->id(), node->op()->mnemonic());
- }
- if (!virtual_states_[node->id()]) {
- virtual_states_[node->id()] =
- new (zone()) VirtualState(*virtual_states_[effect->id()]);
- } else {
- virtual_states_[node->id()]->UpdateFrom(virtual_states_[effect->id()],
- zone());
- }
+ if (virtual_states_[node->id()]) {
+ virtual_states_[node->id()]->UpdateFrom(virtual_states_[effect->id()],
+ zone());
} else {
virtual_states_[node->id()] = virtual_states_[effect->id()];
- if (FLAG_trace_turbo_escape) {
- PrintF("Forwarding object state %p from #%d (%s) to #%d (%s)\n",
- static_cast<void*>(virtual_states_[effect->id()]), effect->id(),
- effect->op()->mnemonic(), node->id(), node->op()->mnemonic());
+ TRACE("Forwarding object state %p from %s#%d to %s#%d",
+ static_cast<void*>(virtual_states_[effect->id()]),
+ effect->op()->mnemonic(), effect->id(), node->op()->mnemonic(),
+ node->id());
+ if (IsEffectBranchPoint(effect) ||
+ OperatorProperties::GetFrameStateInputCount(node->op()) > 0) {
+ virtual_states_[node->id()]->SetCopyRequired();
+ TRACE(", effect input %s#%d is branch point", effect->op()->mnemonic(),
+ effect->id());
}
+ TRACE("\n");
}
}
-
void EscapeAnalysis::ProcessStart(Node* node) {
DCHECK_EQ(node->opcode(), IrOpcode::kStart);
- virtual_states_[node->id()] = new (zone()) VirtualState(zone(), AliasCount());
+ virtual_states_[node->id()] =
+ new (zone()) VirtualState(node, zone(), AliasCount());
}
-
bool EscapeAnalysis::ProcessEffectPhi(Node* node) {
DCHECK_EQ(node->opcode(), IrOpcode::kEffectPhi);
bool changed = false;
VirtualState* mergeState = virtual_states_[node->id()];
if (!mergeState) {
- mergeState = new (zone()) VirtualState(zone(), AliasCount());
+ mergeState = new (zone()) VirtualState(node, zone(), AliasCount());
virtual_states_[node->id()] = mergeState;
changed = true;
- if (FLAG_trace_turbo_escape) {
- PrintF("Effect Phi #%d got new states map %p.\n", node->id(),
- static_cast<void*>(mergeState));
- }
- } else if (mergeState->GetLastChanged() != node) {
- changed = true;
+ TRACE("Effect Phi #%d got new virtual state %p.\n", node->id(),
+ static_cast<void*>(mergeState));
}
cache_->Clear();
- if (FLAG_trace_turbo_escape) {
- PrintF("At Effect Phi #%d, merging states into %p:", node->id(),
- static_cast<void*>(mergeState));
- }
+ TRACE("At Effect Phi #%d, merging states into %p:", node->id(),
+ static_cast<void*>(mergeState));
for (int i = 0; i < node->op()->EffectInputCount(); ++i) {
Node* input = NodeProperties::GetEffectInput(node, i);
VirtualState* state = virtual_states_[input->id()];
if (state) {
cache_->states().push_back(state);
+ if (state == mergeState) {
+ mergeState = new (zone()) VirtualState(node, zone(), AliasCount());
+ virtual_states_[node->id()] = mergeState;
+ changed = true;
+ }
}
- if (FLAG_trace_turbo_escape) {
- PrintF(" %p (from %d %s)", static_cast<void*>(state), input->id(),
- input->op()->mnemonic());
- }
+ TRACE(" %p (from %d %s)", static_cast<void*>(state), input->id(),
+ input->op()->mnemonic());
}
- if (FLAG_trace_turbo_escape) {
- PrintF("\n");
- }
+ TRACE("\n");
if (cache_->states().size() == 0) {
return changed;
}
- changed = mergeState->MergeFrom(cache_, zone(), graph(), common(),
- NodeProperties::GetControlInput(node)) ||
- changed;
+ changed =
+ mergeState->MergeFrom(cache_, zone(), graph(), common(), node) || changed;
- if (FLAG_trace_turbo_escape) {
- PrintF("Merge %s the node.\n", changed ? "changed" : "did not change");
- }
+ TRACE("Merge %s the node.\n", changed ? "changed" : "did not change");
if (changed) {
- mergeState->LastChangedAt(node);
- escape_status_.Resize();
+ status_analysis_.ResizeStatusVector();
}
return changed;
}
-
void EscapeAnalysis::ProcessAllocation(Node* node) {
DCHECK_EQ(node->opcode(), IrOpcode::kAllocate);
ForwardVirtualState(node);
+ VirtualState* state = virtual_states_[node->id()];
+ Alias alias = GetAlias(node->id());
// Check if we have already processed this node.
- if (virtual_states_[node->id()]->VirtualObjectFromAlias(
- aliases_[node->id()])) {
+ if (state->VirtualObjectFromAlias(alias)) {
return;
}
+ if (state->owner()->opcode() == IrOpcode::kEffectPhi) {
+ state = CopyForModificationAt(state, node);
+ }
+
NumberMatcher size(node->InputAt(0));
DCHECK(node->InputAt(0)->opcode() != IrOpcode::kInt32Constant &&
node->InputAt(0)->opcode() != IrOpcode::kInt64Constant &&
node->InputAt(0)->opcode() != IrOpcode::kFloat32Constant &&
node->InputAt(0)->opcode() != IrOpcode::kFloat64Constant);
if (size.HasValue()) {
- virtual_states_[node->id()]->SetVirtualObject(
- aliases_[node->id()],
- new (zone())
- VirtualObject(node->id(), zone(), size.Value() / kPointerSize));
+ VirtualObject* obj = new (zone()) VirtualObject(
+ node->id(), state, zone(), size.Value() / kPointerSize, false);
+ state->SetVirtualObject(alias, obj);
} else {
- virtual_states_[node->id()]->SetVirtualObject(
- aliases_[node->id()], new (zone()) VirtualObject(node->id(), zone()));
+ state->SetVirtualObject(
+ alias, new (zone()) VirtualObject(node->id(), state, zone()));
}
- virtual_states_[node->id()]->LastChangedAt(node);
}
-
void EscapeAnalysis::ProcessFinishRegion(Node* node) {
DCHECK_EQ(node->opcode(), IrOpcode::kFinishRegion);
ForwardVirtualState(node);
Node* allocation = NodeProperties::GetValueInput(node, 0);
if (allocation->opcode() == IrOpcode::kAllocate) {
VirtualState* state = virtual_states_[node->id()];
- if (!state->VirtualObjectFromAlias(aliases_[node->id()])) {
- VirtualObject* vobj_alloc =
- state->VirtualObjectFromAlias(aliases_[allocation->id()]);
- DCHECK_NOT_NULL(vobj_alloc);
- state->SetVirtualObject(aliases_[node->id()], vobj_alloc);
- if (FLAG_trace_turbo_escape) {
- PrintF("Linked finish region node #%d to node #%d\n", node->id(),
- allocation->id());
- }
- state->LastChangedAt(node);
- }
+ VirtualObject* obj = state->VirtualObjectFromAlias(GetAlias(node->id()));
+ DCHECK_NOT_NULL(obj);
+ obj->SetInitialized();
}
}
-
Node* EscapeAnalysis::replacement(NodeId id) {
if (id >= replacements_.size()) return nullptr;
return replacements_[id];
}
-
Node* EscapeAnalysis::replacement(Node* node) {
return replacement(node->id());
}
-
bool EscapeAnalysis::SetReplacement(Node* node, Node* rep) {
bool changed = replacements_[node->id()] != rep;
replacements_[node->id()] = rep;
return changed;
}
-
bool EscapeAnalysis::UpdateReplacement(VirtualState* state, Node* node,
Node* rep) {
if (SetReplacement(node, rep)) {
- state->LastChangedAt(node);
- if (FLAG_trace_turbo_escape) {
- if (rep) {
- PrintF("Replacement of #%d is #%d (%s)\n", node->id(), rep->id(),
- rep->op()->mnemonic());
- } else {
- PrintF("Replacement of #%d cleared\n", node->id());
- }
+ if (rep) {
+ TRACE("Replacement of #%d is #%d (%s)\n", node->id(), rep->id(),
+ rep->op()->mnemonic());
+ } else {
+ TRACE("Replacement of #%d cleared\n", node->id());
}
return true;
}
return false;
}
-
Node* EscapeAnalysis::ResolveReplacement(Node* node) {
while (replacement(node)) {
node = replacement(node);
@@ -1131,12 +1205,10 @@
return node;
}
-
Node* EscapeAnalysis::GetReplacement(Node* node) {
return GetReplacement(node->id());
}
-
Node* EscapeAnalysis::GetReplacement(NodeId id) {
Node* node = nullptr;
while (replacement(id)) {
@@ -1146,50 +1218,31 @@
return node;
}
-
bool EscapeAnalysis::IsVirtual(Node* node) {
- if (node->id() >= escape_status_.size()) {
+ if (node->id() >= status_analysis_.GetStatusVectorSize()) {
return false;
}
- return escape_status_.IsVirtual(node);
+ return status_analysis_.IsVirtual(node);
}
-
bool EscapeAnalysis::IsEscaped(Node* node) {
- if (node->id() >= escape_status_.size()) {
+ if (node->id() >= status_analysis_.GetStatusVectorSize()) {
return false;
}
- return escape_status_.IsEscaped(node);
+ return status_analysis_.IsEscaped(node);
}
-
bool EscapeAnalysis::SetEscaped(Node* node) {
- return escape_status_.SetEscaped(node);
+ return status_analysis_.SetEscaped(node);
}
-
VirtualObject* EscapeAnalysis::GetVirtualObject(Node* at, NodeId id) {
if (VirtualState* states = virtual_states_[at->id()]) {
- return states->VirtualObjectFromAlias(aliases_[id]);
+ return states->VirtualObjectFromAlias(GetAlias(id));
}
return nullptr;
}
-
-VirtualObject* EscapeAnalysis::ResolveVirtualObject(VirtualState* state,
- Node* node) {
- VirtualObject* obj = GetVirtualObject(state, ResolveReplacement(node));
- while (obj && replacement(obj->id())) {
- if (VirtualObject* next = GetVirtualObject(state, replacement(obj->id()))) {
- obj = next;
- } else {
- break;
- }
- }
- return obj;
-}
-
-
bool EscapeAnalysis::CompareVirtualObjects(Node* left, Node* right) {
DCHECK(IsVirtual(left) && IsVirtual(right));
left = ResolveReplacement(left);
@@ -1200,83 +1253,78 @@
return false;
}
-
int EscapeAnalysis::OffsetFromAccess(Node* node) {
DCHECK(OpParameter<FieldAccess>(node).offset % kPointerSize == 0);
return OpParameter<FieldAccess>(node).offset / kPointerSize;
}
-
-void EscapeAnalysis::ProcessLoadFromPhi(int offset, Node* from, Node* node,
+void EscapeAnalysis::ProcessLoadFromPhi(int offset, Node* from, Node* load,
VirtualState* state) {
- if (FLAG_trace_turbo_escape) {
- PrintF("Load #%d from phi #%d", node->id(), from->id());
- }
+ TRACE("Load #%d from phi #%d", load->id(), from->id());
cache_->fields().clear();
- for (int i = 0; i < node->op()->ValueInputCount(); ++i) {
- Node* input = NodeProperties::GetValueInput(node, i);
+ for (int i = 0; i < load->op()->ValueInputCount(); ++i) {
+ Node* input = NodeProperties::GetValueInput(load, i);
cache_->fields().push_back(input);
}
- cache_->LoadVirtualObjectsForFieldsFrom(state, aliases_);
+ cache_->LoadVirtualObjectsForFieldsFrom(state,
+ status_analysis_.GetAliasMap());
if (cache_->objects().size() == cache_->fields().size()) {
cache_->GetFields(offset);
if (cache_->fields().size() == cache_->objects().size()) {
- Node* rep = replacement(node);
+ Node* rep = replacement(load);
if (!rep || !IsEquivalentPhi(rep, cache_->fields())) {
int value_input_count = static_cast<int>(cache_->fields().size());
cache_->fields().push_back(NodeProperties::GetControlInput(from));
Node* phi = graph()->NewNode(
common()->Phi(MachineRepresentation::kTagged, value_input_count),
value_input_count + 1, &cache_->fields().front());
- escape_status_.Resize();
- SetReplacement(node, phi);
- state->LastChangedAt(node);
- if (FLAG_trace_turbo_escape) {
- PrintF(" got phi created.\n");
- }
- } else if (FLAG_trace_turbo_escape) {
- PrintF(" has already phi #%d.\n", rep->id());
+ status_analysis_.ResizeStatusVector();
+ SetReplacement(load, phi);
+ TRACE(" got phi created.\n");
+ } else {
+ TRACE(" has already phi #%d.\n", rep->id());
}
- } else if (FLAG_trace_turbo_escape) {
- PrintF(" has incomplete field info.\n");
+ } else {
+ TRACE(" has incomplete field info.\n");
}
- } else if (FLAG_trace_turbo_escape) {
- PrintF(" has incomplete virtual object info.\n");
+ } else {
+ TRACE(" has incomplete virtual object info.\n");
}
}
-
void EscapeAnalysis::ProcessLoadField(Node* node) {
DCHECK_EQ(node->opcode(), IrOpcode::kLoadField);
ForwardVirtualState(node);
- Node* from = NodeProperties::GetValueInput(node, 0);
+ Node* from = ResolveReplacement(NodeProperties::GetValueInput(node, 0));
VirtualState* state = virtual_states_[node->id()];
- if (VirtualObject* object = ResolveVirtualObject(state, from)) {
+ if (VirtualObject* object = GetVirtualObject(state, from)) {
int offset = OffsetFromAccess(node);
- if (!object->IsTracked()) return;
+ if (!object->IsTracked() ||
+ static_cast<size_t>(offset) >= object->field_count()) {
+ return;
+ }
Node* value = object->GetField(offset);
if (value) {
value = ResolveReplacement(value);
}
// Record that the load has this alias.
UpdateReplacement(state, node, value);
+ } else if (from->opcode() == IrOpcode::kPhi &&
+ OpParameter<FieldAccess>(node).offset % kPointerSize == 0) {
+ int offset = OffsetFromAccess(node);
+ // Only binary phis are supported for now.
+ ProcessLoadFromPhi(offset, from, node, state);
} else {
- if (from->opcode() == IrOpcode::kPhi &&
- OpParameter<FieldAccess>(node).offset % kPointerSize == 0) {
- int offset = OffsetFromAccess(node);
- // Only binary phis are supported for now.
- ProcessLoadFromPhi(offset, from, node, state);
- }
+ UpdateReplacement(state, node, nullptr);
}
}
-
void EscapeAnalysis::ProcessLoadElement(Node* node) {
DCHECK_EQ(node->opcode(), IrOpcode::kLoadElement);
ForwardVirtualState(node);
- Node* from = NodeProperties::GetValueInput(node, 0);
+ Node* from = ResolveReplacement(NodeProperties::GetValueInput(node, 0));
VirtualState* state = virtual_states_[node->id()];
Node* index_node = node->InputAt(1);
NumberMatcher index(index_node);
@@ -1287,12 +1335,16 @@
ElementAccess access = OpParameter<ElementAccess>(node);
if (index.HasValue()) {
int offset = index.Value() + access.header_size / kPointerSize;
- if (VirtualObject* object = ResolveVirtualObject(state, from)) {
+ if (VirtualObject* object = GetVirtualObject(state, from)) {
CHECK_GE(ElementSizeLog2Of(access.machine_type.representation()),
kPointerSizeLog2);
CHECK_EQ(access.header_size % kPointerSize, 0);
- if (!object->IsTracked()) return;
+ if (!object->IsTracked() ||
+ static_cast<size_t>(offset) >= object->field_count()) {
+ return;
+ }
+
Node* value = object->GetField(offset);
if (value) {
value = ResolveReplacement(value);
@@ -1303,43 +1355,42 @@
ElementAccess access = OpParameter<ElementAccess>(node);
int offset = index.Value() + access.header_size / kPointerSize;
ProcessLoadFromPhi(offset, from, node, state);
+ } else {
+ UpdateReplacement(state, node, nullptr);
}
} else {
// We have a load from a non-const index, cannot eliminate object.
if (SetEscaped(from)) {
- if (FLAG_trace_turbo_escape) {
- PrintF(
- "Setting #%d (%s) to escaped because store element #%d to "
- "non-const "
- "index #%d (%s)\n",
- from->id(), from->op()->mnemonic(), node->id(), index_node->id(),
- index_node->op()->mnemonic());
- }
+ TRACE(
+ "Setting #%d (%s) to escaped because load element #%d from non-const "
+ "index #%d (%s)\n",
+ from->id(), from->op()->mnemonic(), node->id(), index_node->id(),
+ index_node->op()->mnemonic());
}
}
}
-
void EscapeAnalysis::ProcessStoreField(Node* node) {
DCHECK_EQ(node->opcode(), IrOpcode::kStoreField);
ForwardVirtualState(node);
- Node* to = NodeProperties::GetValueInput(node, 0);
- Node* val = NodeProperties::GetValueInput(node, 1);
+ Node* to = ResolveReplacement(NodeProperties::GetValueInput(node, 0));
VirtualState* state = virtual_states_[node->id()];
- if (VirtualObject* obj = ResolveVirtualObject(state, to)) {
- if (!obj->IsTracked()) return;
- int offset = OffsetFromAccess(node);
- if (obj->SetField(offset, ResolveReplacement(val))) {
- state->LastChangedAt(node);
+ VirtualObject* obj = GetVirtualObject(state, to);
+ int offset = OffsetFromAccess(node);
+ if (obj && obj->IsTracked() &&
+ static_cast<size_t>(offset) < obj->field_count()) {
+ Node* val = ResolveReplacement(NodeProperties::GetValueInput(node, 1));
+ if (obj->GetField(offset) != val) {
+ obj = CopyForModificationAt(obj, state, node);
+ obj->SetField(offset, val);
}
}
}
-
void EscapeAnalysis::ProcessStoreElement(Node* node) {
DCHECK_EQ(node->opcode(), IrOpcode::kStoreElement);
ForwardVirtualState(node);
- Node* to = NodeProperties::GetValueInput(node, 0);
+ Node* to = ResolveReplacement(NodeProperties::GetValueInput(node, 0));
Node* index_node = node->InputAt(1);
NumberMatcher index(index_node);
DCHECK(index_node->opcode() != IrOpcode::kInt32Constant &&
@@ -1347,41 +1398,47 @@
index_node->opcode() != IrOpcode::kFloat32Constant &&
index_node->opcode() != IrOpcode::kFloat64Constant);
ElementAccess access = OpParameter<ElementAccess>(node);
- Node* val = NodeProperties::GetValueInput(node, 2);
+ VirtualState* state = virtual_states_[node->id()];
+ VirtualObject* obj = GetVirtualObject(state, to);
if (index.HasValue()) {
int offset = index.Value() + access.header_size / kPointerSize;
- VirtualState* states = virtual_states_[node->id()];
- if (VirtualObject* obj = ResolveVirtualObject(states, to)) {
- if (!obj->IsTracked()) return;
+ if (obj && obj->IsTracked() &&
+ static_cast<size_t>(offset) < obj->field_count()) {
CHECK_GE(ElementSizeLog2Of(access.machine_type.representation()),
kPointerSizeLog2);
CHECK_EQ(access.header_size % kPointerSize, 0);
- if (obj->SetField(offset, ResolveReplacement(val))) {
- states->LastChangedAt(node);
+ Node* val = ResolveReplacement(NodeProperties::GetValueInput(node, 2));
+ if (obj->GetField(offset) != val) {
+ obj = CopyForModificationAt(obj, state, node);
+ obj->SetField(offset, val);
}
}
} else {
// We have a store to a non-const index, cannot eliminate object.
if (SetEscaped(to)) {
- if (FLAG_trace_turbo_escape) {
- PrintF(
- "Setting #%d (%s) to escaped because store element #%d to "
- "non-const "
- "index #%d (%s)\n",
- to->id(), to->op()->mnemonic(), node->id(), index_node->id(),
- index_node->op()->mnemonic());
+ TRACE(
+ "Setting #%d (%s) to escaped because store element #%d to non-const "
+ "index #%d (%s)\n",
+ to->id(), to->op()->mnemonic(), node->id(), index_node->id(),
+ index_node->op()->mnemonic());
+ }
+ if (obj && obj->IsTracked()) {
+ if (!obj->AllFieldsClear()) {
+ obj = CopyForModificationAt(obj, state, node);
+ obj->ClearAllFields();
+ TRACE("Cleared all fields of @%d:#%d\n", GetAlias(obj->id()),
+ obj->id());
}
}
}
}
-
Node* EscapeAnalysis::GetOrCreateObjectState(Node* effect, Node* node) {
if ((node->opcode() == IrOpcode::kFinishRegion ||
node->opcode() == IrOpcode::kAllocate) &&
IsVirtual(node)) {
- if (VirtualObject* vobj =
- ResolveVirtualObject(virtual_states_[effect->id()], node)) {
+ if (VirtualObject* vobj = GetVirtualObject(virtual_states_[effect->id()],
+ ResolveReplacement(node))) {
if (Node* object_state = vobj->GetObjectState()) {
return object_state;
} else {
@@ -1396,13 +1453,11 @@
graph()->NewNode(common()->ObjectState(input_count, vobj->id()),
input_count, &cache_->fields().front());
vobj->SetObjectState(new_object_state);
- if (FLAG_trace_turbo_escape) {
- PrintF(
- "Creating object state #%d for vobj %p (from node #%d) at effect "
- "#%d\n",
- new_object_state->id(), static_cast<void*>(vobj), node->id(),
- effect->id());
- }
+ TRACE(
+ "Creating object state #%d for vobj %p (from node #%d) at effect "
+ "#%d\n",
+ new_object_state->id(), static_cast<void*>(vobj), node->id(),
+ effect->id());
// Now fix uses of other objects.
for (size_t i = 0; i < vobj->field_count(); ++i) {
if (Node* field = vobj->GetField(i)) {
@@ -1420,7 +1475,6 @@
return nullptr;
}
-
void EscapeAnalysis::DebugPrintObject(VirtualObject* object, Alias alias) {
PrintF(" Alias @%d: Object #%d with %zu fields\n", alias, object->id(),
object->field_count());
@@ -1431,9 +1485,8 @@
}
}
-
void EscapeAnalysis::DebugPrintState(VirtualState* state) {
- PrintF("Dumping object state %p\n", static_cast<void*>(state));
+ PrintF("Dumping virtual state %p\n", static_cast<void*>(state));
for (Alias alias = 0; alias < AliasCount(); ++alias) {
if (VirtualObject* object = state->VirtualObjectFromAlias(alias)) {
DebugPrintObject(object, alias);
@@ -1441,7 +1494,6 @@
}
}
-
void EscapeAnalysis::DebugPrint() {
ZoneVector<VirtualState*> object_states(zone());
for (NodeId id = 0; id < virtual_states_.size(); id++) {
@@ -1457,15 +1509,26 @@
}
}
-
VirtualObject* EscapeAnalysis::GetVirtualObject(VirtualState* state,
Node* node) {
- if (node->id() >= aliases_.size()) return nullptr;
- Alias alias = aliases_[node->id()];
+ if (node->id() >= status_analysis_.GetAliasMap().size()) return nullptr;
+ Alias alias = GetAlias(node->id());
if (alias >= state->size()) return nullptr;
return state->VirtualObjectFromAlias(alias);
}
+bool EscapeAnalysis::ExistsVirtualAllocate() {
+ for (size_t id = 0; id < status_analysis_.GetAliasMap().size(); ++id) {
+ Alias alias = GetAlias(static_cast<NodeId>(id));
+ if (alias < EscapeStatusAnalysis::kUntrackable) {
+ if (status_analysis_.IsVirtual(static_cast<int>(id))) {
+ return true;
+ }
+ }
+ }
+ return false;
+}
+
} // namespace compiler
} // namespace internal
} // namespace v8