/*
 * Copyright (C) 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "global_value_numbering.h"

#include "base/bit_vector-inl.h"
#include "base/stl_util.h"
#include "local_value_numbering.h"

namespace art {

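// Note: last_value_ starts at kNullValue and is set to kNoValue ("make bad") if the full GVN
// fails to converge within max_bbs_to_process_ processed blocks; see PrepareBasicBlock() below.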
GlobalValueNumbering::GlobalValueNumbering(CompilationUnit* cu, ScopedArenaAllocator* allocator,
                                           Mode mode)
    : cu_(cu),
      mir_graph_(cu->mir_graph.get()),
      allocator_(allocator),
      bbs_processed_(0u),
      max_bbs_to_process_(kMaxBbsToProcessMultiplyFactor * mir_graph_->GetNumReachableBlocks()),
      last_value_(kNullValue),
      modifications_allowed_(true),
      mode_(mode),
      global_value_map_(std::less<uint64_t>(), allocator->Adapter()),
      array_location_map_(ArrayLocationComparator(), allocator->Adapter()),
      array_location_reverse_map_(allocator->Adapter()),
      ref_set_map_(std::less<ValueNameSet>(), allocator->Adapter()),
      lvns_(mir_graph_->GetNumBlocks(), nullptr, allocator->Adapter()),
      work_lvn_(nullptr),
      merge_lvns_(allocator->Adapter()) {
}

GlobalValueNumbering::~GlobalValueNumbering() {
  STLDeleteElements(&lvns_);
}

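// Rough usage sketch of the two-phase per-block API below (simplified and hypothetical: the
// real pass driver iterates in a repeating topological order, and LocalValueNumbering::
// GetValueNumber() is assumed here to be the per-MIR entry point from local_value_numbering.h):
//
//   GlobalValueNumbering gvn(cu, allocator, GlobalValueNumbering::kModeGvn);
//   bool change = true;
//   while (change && gvn.Good()) {
//     change = false;
//     for (BasicBlock* bb : blocks_in_topological_order) {  // hypothetical range
//       LocalValueNumbering* lvn = gvn.PrepareBasicBlock(bb);
//       if (lvn != nullptr) {
//         for (MIR* mir = bb->first_mir_insn; mir != nullptr; mir = mir->next) {
//           lvn->GetValueNumber(mir);
//         }
//         change |= gvn.FinishBasicBlock(bb);
//       }
//     }
//   }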
LocalValueNumbering* GlobalValueNumbering::PrepareBasicBlock(BasicBlock* bb,
                                                             ScopedArenaAllocator* allocator) {
  if (UNLIKELY(!Good())) {
    return nullptr;
  }
  if (bb->block_type != kDalvikByteCode && bb->block_type != kEntryBlock) {
    DCHECK(bb->first_mir_insn == nullptr);
    return nullptr;
  }
  if (mode_ == kModeGvn && UNLIKELY(bbs_processed_ == max_bbs_to_process_)) {
    // If we're still trying to converge, stop now. Otherwise, proceed to apply optimizations.
    last_value_ = kNoValue;  // Make bad.
    return nullptr;
  }
  if (mode_ == kModeGvnPostProcessing &&
      mir_graph_->GetTopologicalSortOrderLoopHeadStack()->empty()) {
    // Modifications outside loops are performed during the main phase.
    return nullptr;
  }
  if (allocator == nullptr) {
    allocator = allocator_;
  }
  DCHECK(work_lvn_.get() == nullptr);
  work_lvn_.reset(new (allocator) LocalValueNumbering(this, bb->id, allocator));
  if (bb->block_type == kEntryBlock) {
    work_lvn_->PrepareEntryBlock();
    DCHECK(bb->first_mir_insn == nullptr);  // modifications_allowed_ is irrelevant.
  } else {
    // To avoid repeated allocation on the ArenaStack, reuse a single vector kept as a member.
    DCHECK(merge_lvns_.empty());
    // If we're running the full GVN, the RepeatingTopologicalSortIterator keeps the loop
    // head stack in the MIRGraph up to date and for a loop head we need to check whether
    // we're making the initial computation and need to merge only preceding blocks in the
    // topological order, or we're recalculating a loop head and need to merge all incoming
    // LVNs. When we're not at a loop head (including having an empty loop head stack) all
    // predecessors should be preceding blocks and we shall merge all of them anyway.
    bool use_all_predecessors = true;
    uint16_t loop_head_idx = 0u;  // Used only if !use_all_predecessors.
    if (mode_ == kModeGvn && mir_graph_->GetTopologicalSortOrderLoopHeadStack()->size() != 0) {
      // Full GVN inside a loop, see if we're at the loop head for the first time.
      modifications_allowed_ = false;
      auto top = mir_graph_->GetTopologicalSortOrderLoopHeadStack()->back();
      loop_head_idx = top.first;
      bool recalculating = top.second;
      use_all_predecessors = recalculating ||
          loop_head_idx != mir_graph_->GetTopologicalSortOrderIndexes()[bb->id];
    } else {
      modifications_allowed_ = true;
    }
    for (BasicBlockId pred_id : bb->predecessors) {
      DCHECK_NE(pred_id, NullBasicBlockId);
      if (lvns_[pred_id] != nullptr &&
          (use_all_predecessors ||
              mir_graph_->GetTopologicalSortOrderIndexes()[pred_id] < loop_head_idx)) {
        merge_lvns_.push_back(lvns_[pred_id]);
      }
    }
    // Determine merge type.
    LocalValueNumbering::MergeType merge_type = LocalValueNumbering::kNormalMerge;
    if (bb->catch_entry) {
      merge_type = LocalValueNumbering::kCatchMerge;
    } else if (bb->last_mir_insn != nullptr &&
               IsInstructionReturn(bb->last_mir_insn->dalvikInsn.opcode) &&
               bb->GetFirstNonPhiInsn() == bb->last_mir_insn) {
      merge_type = LocalValueNumbering::kReturnMerge;
    }
    // At least one predecessor must have been processed before this bb.
    CHECK(!merge_lvns_.empty());
    if (merge_lvns_.size() == 1u) {
      work_lvn_->MergeOne(*merge_lvns_[0], merge_type);
    } else {
      work_lvn_->Merge(merge_type);
    }
  }
  return work_lvn_.get();
}

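// Finish processing the current block: clear the merge state, record whether this block's LVN
// differs from the one computed in the previous iteration (the convergence signal for the
// caller), and in full-GVN mode keep the latest LVN for use by successor merges.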
bool GlobalValueNumbering::FinishBasicBlock(BasicBlock* bb) {
  DCHECK(work_lvn_ != nullptr);
  DCHECK_EQ(bb->id, work_lvn_->Id());
  ++bbs_processed_;
  merge_lvns_.clear();

  bool change = (lvns_[bb->id] == nullptr) || !lvns_[bb->id]->Equals(*work_lvn_);
  if (mode_ == kModeGvn) {
    // In GVN mode, keep the latest LVN even if Equals() indicates no change. This is
    // to keep the correct values of fields that do not contribute to Equals() as long
    // as they depend only on predecessor LVNs' fields that do contribute to Equals().
    // Currently, that's LVN::merge_map_ used by LVN::GetStartingVregValueNumberImpl().
    std::unique_ptr<const LocalValueNumbering> old_lvn(lvns_[bb->id]);
    lvns_[bb->id] = work_lvn_.release();
  } else {
    work_lvn_.reset();
  }
  return change;
}

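// Intern the (base, index) pair as a 16-bit array location id, allocating a new id and its
// reverse-map entry on first use.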
uint16_t GlobalValueNumbering::GetArrayLocation(uint16_t base, uint16_t index) {
  auto cmp = array_location_map_.key_comp();
  ArrayLocation key = { base, index };
  auto lb = array_location_map_.lower_bound(key);
  if (lb != array_location_map_.end() && !cmp(key, lb->first)) {
    return lb->second;
  }
  uint16_t location = static_cast<uint16_t>(array_location_reverse_map_.size());
  DCHECK_EQ(location, array_location_reverse_map_.size());  // No overflow.
  auto it = array_location_map_.PutBefore(lb, key, location);
  array_location_reverse_map_.push_back(&*it);
  return location;
}

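// Check whether the edge pred_bb -> succ_id is taken only when the s_reg tested by the
// predecessor's trailing IF_EQZ/IF_NEZ is non-zero (non-null): IF_EQZ must fall through
// to succ_id, IF_NEZ must branch to it.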
bool GlobalValueNumbering::HasNullCheckLastInsn(const BasicBlock* pred_bb,
                                                BasicBlockId succ_id) {
  if (pred_bb->block_type != kDalvikByteCode || pred_bb->last_mir_insn == nullptr) {
    return false;
  }
  Instruction::Code last_opcode = pred_bb->last_mir_insn->dalvikInsn.opcode;
  return ((last_opcode == Instruction::IF_EQZ && pred_bb->fall_through == succ_id) ||
          (last_opcode == Instruction::IF_NEZ && pred_bb->taken == succ_id));
}

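// Return true if the merged value is known non-null on entry from every predecessor, either
// because the predecessor's LVN has it marked as null-checked or because the incoming edge
// is guarded by an IF_EQZ/IF_NEZ on that same value.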
bool GlobalValueNumbering::NullCheckedInAllPredecessors(
    const ScopedArenaVector<uint16_t>& merge_names) const {
  // Implicit parameters:
  //   - *work_lvn_: the LVN for which we're checking predecessors.
  //   - merge_lvns_: the predecessor LVNs.
  DCHECK_EQ(merge_lvns_.size(), merge_names.size());
  for (size_t i = 0, size = merge_lvns_.size(); i != size; ++i) {
    const LocalValueNumbering* pred_lvn = merge_lvns_[i];
    uint16_t value_name = merge_names[i];
    if (!pred_lvn->IsValueNullChecked(value_name)) {
      // Check if the predecessor has an IF_EQZ/IF_NEZ as the last insn.
      const BasicBlock* pred_bb = mir_graph_->GetBasicBlock(pred_lvn->Id());
      if (!HasNullCheckLastInsn(pred_bb, work_lvn_->Id())) {
        return false;
      }
      // IF_EQZ/IF_NEZ checks some sreg, see if that sreg contains the value_name.
      int s_reg = pred_bb->last_mir_insn->ssa_rep->uses[0];
      if (pred_lvn->GetSregValue(s_reg) != value_name) {
        return false;
      }
    }
  }
  return true;
}

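// Return true if every predecessor's LVN has already zero-checked the merged divisor value.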
bool GlobalValueNumbering::DivZeroCheckedInAllPredecessors(
    const ScopedArenaVector<uint16_t>& merge_names) const {
  // Implicit parameters:
  //   - *work_lvn_: the LVN for which we're checking predecessors.
  //   - merge_lvns_: the predecessor LVNs.
  DCHECK_EQ(merge_lvns_.size(), merge_names.size());
  for (size_t i = 0, size = merge_lvns_.size(); i != size; ++i) {
    const LocalValueNumbering* pred_lvn = merge_lvns_[i];
    uint16_t value_name = merge_names[i];
    if (!pred_lvn->IsValueDivZeroChecked(value_name)) {
      return false;
    }
  }
  return true;
}

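// Check whether bb_id has a single predecessor that transfers control to it only when the
// value named by cond is non-zero (IF_NEZ taken or IF_EQZ fall-through on that value).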
bool GlobalValueNumbering::IsBlockEnteredOnTrue(uint16_t cond, BasicBlockId bb_id) {
  DCHECK_NE(cond, kNoValue);
  BasicBlock* bb = mir_graph_->GetBasicBlock(bb_id);
  if (bb->predecessors.size() == 1u) {
    BasicBlockId pred_id = bb->predecessors[0];
    BasicBlock* pred_bb = mir_graph_->GetBasicBlock(pred_id);
    if (pred_bb->last_mir_insn != nullptr) {
      Instruction::Code opcode = pred_bb->last_mir_insn->dalvikInsn.opcode;
      if ((opcode == Instruction::IF_NEZ && pred_bb->taken == bb_id) ||
          (opcode == Instruction::IF_EQZ && pred_bb->fall_through == bb_id)) {
        DCHECK(lvns_[pred_id] != nullptr);
        uint16_t operand = lvns_[pred_id]->GetSregValue(pred_bb->last_mir_insn->ssa_rep->uses[0]);
        if (operand == cond) {
          return true;
        }
      }
    }
  }
  return false;
}

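// Conservative query: the condition is considered true in bb_id if bb_id itself or any of
// its dominators is entered only when cond holds. This is not full value propagation.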
bool GlobalValueNumbering::IsTrueInBlock(uint16_t cond, BasicBlockId bb_id) {
  // We're not doing proper value propagation, so just see if the condition is used
  // with if-nez/if-eqz to branch/fall-through to this bb or one of its dominators.
  DCHECK_NE(cond, kNoValue);
  if (IsBlockEnteredOnTrue(cond, bb_id)) {
    return true;
  }
  BasicBlock* bb = mir_graph_->GetBasicBlock(bb_id);
  for (uint32_t dom_id : bb->dominators->Indexes()) {
    if (IsBlockEnteredOnTrue(cond, dom_id)) {
      return true;
    }
  }
  return false;
}

}  // namespace art