// Copyright 2015 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
| 4 | |
| 5 | #include "src/interpreter/bytecode-peephole-optimizer.h" |
| 6 | |
| 7 | #include "src/interpreter/constant-array-builder.h" |
| 8 | #include "src/objects-inl.h" |
| 9 | #include "src/objects.h" |
| 10 | |
| 11 | namespace v8 { |
| 12 | namespace internal { |
| 13 | namespace interpreter { |
| 14 | |
// Constructs a peephole optimizer pipeline stage. Optimized bytecodes are
// forwarded to |next_stage|; |constant_array_builder| is consulted when an
// elision decision depends on the value of a constant-pool entry.
BytecodePeepholeOptimizer::BytecodePeepholeOptimizer(
    ConstantArrayBuilder* constant_array_builder,
    BytecodePipelineStage* next_stage)
    : constant_array_builder_(constant_array_builder),
      next_stage_(next_stage),
      last_is_discardable_(false) {
  // Start with no buffered bytecode: |last_| is marked invalid.
  InvalidateLast();
}
| 23 | |
// Marks the buffered bytecode as invalid by storing the kIllegal sentinel;
// LastIsValid() keys off this value.
void BytecodePeepholeOptimizer::InvalidateLast() {
  last_.set_bytecode(Bytecode::kIllegal);
}
| 27 | |
| 28 | bool BytecodePeepholeOptimizer::LastIsValid() const { |
| 29 | return last_.bytecode() != Bytecode::kIllegal; |
| 30 | } |
| 31 | |
| 32 | void BytecodePeepholeOptimizer::SetLast(const BytecodeNode* const node) { |
| 33 | last_.Clone(node); |
| 34 | last_is_discardable_ = true; |
| 35 | } |
| 36 | |
| 37 | // override |
| 38 | size_t BytecodePeepholeOptimizer::FlushForOffset() { |
| 39 | size_t buffered_size = next_stage_->FlushForOffset(); |
| 40 | if (LastIsValid()) { |
| 41 | if (last_.bytecode() == Bytecode::kNop && |
| 42 | !last_.source_info().is_statement()) { |
| 43 | // The Nop can be dropped as it doesn't have a statement |
| 44 | // position for the debugger and doesn't have any effects by |
| 45 | // definition. |
| 46 | InvalidateLast(); |
| 47 | } else { |
| 48 | buffered_size += last_.Size(); |
| 49 | last_is_discardable_ = false; |
| 50 | } |
| 51 | } |
| 52 | return buffered_size; |
| 53 | } |
| 54 | |
| 55 | // override |
| 56 | void BytecodePeepholeOptimizer::FlushBasicBlock() { |
| 57 | if (LastIsValid()) { |
| 58 | next_stage_->Write(&last_); |
| 59 | InvalidateLast(); |
| 60 | } |
| 61 | next_stage_->FlushBasicBlock(); |
| 62 | } |
| 63 | |
| 64 | // override |
| 65 | void BytecodePeepholeOptimizer::Write(BytecodeNode* node) { |
| 66 | // Attempt optimization if there is an earlier node to optimize with. |
| 67 | if (LastIsValid()) { |
| 68 | node = Optimize(node); |
| 69 | // Only output the last node if it wasn't invalidated by the optimization. |
| 70 | if (LastIsValid()) { |
| 71 | next_stage_->Write(&last_); |
| 72 | InvalidateLast(); |
| 73 | } |
| 74 | } |
| 75 | |
| 76 | if (node != nullptr) { |
| 77 | SetLast(node); |
| 78 | } |
| 79 | } |
| 80 | |
| 81 | Handle<Object> BytecodePeepholeOptimizer::GetConstantForIndexOperand( |
| 82 | const BytecodeNode* const node, int index) const { |
| 83 | DCHECK_LE(index, node->operand_count()); |
| 84 | DCHECK_EQ(Bytecodes::GetOperandType(node->bytecode(), 0), OperandType::kIdx); |
| 85 | uint32_t index_operand = node->operand(0); |
| 86 | return constant_array_builder_->At(index_operand); |
| 87 | } |
| 88 | |
| 89 | bool BytecodePeepholeOptimizer::LastBytecodePutsNameInAccumulator() const { |
| 90 | DCHECK(LastIsValid()); |
| 91 | return (last_.bytecode() == Bytecode::kTypeOf || |
| 92 | last_.bytecode() == Bytecode::kToName || |
| 93 | (last_.bytecode() == Bytecode::kLdaConstant && |
| 94 | GetConstantForIndexOperand(&last_, 0)->IsName())); |
| 95 | } |
| 96 | |
| 97 | void BytecodePeepholeOptimizer::UpdateCurrentBytecode(BytecodeNode* current) { |
| 98 | if (Bytecodes::IsJumpIfToBoolean(current->bytecode()) && |
| 99 | Bytecodes::WritesBooleanToAccumulator(last_.bytecode())) { |
| 100 | // Conditional jumps with boolean conditions are emitted in |
| 101 | // ToBoolean form by the bytecode array builder, |
| 102 | // i.e. JumpIfToBooleanTrue rather JumpIfTrue. The ToBoolean element |
| 103 | // can be removed if the previous bytecode put a boolean value in |
| 104 | // the accumulator. |
| 105 | Bytecode jump = Bytecodes::GetJumpWithoutToBoolean(current->bytecode()); |
| 106 | current->set_bytecode(jump, current->operand(0), current->operand_scale()); |
| 107 | } else if (current->bytecode() == Bytecode::kToBooleanLogicalNot && |
| 108 | Bytecodes::WritesBooleanToAccumulator(last_.bytecode())) { |
| 109 | // Logical-nots are emitted in ToBoolean form by the bytecode array |
| 110 | // builder, The ToBoolean element can be removed if the previous bytecode |
| 111 | // put a boolean value in the accumulator. |
| 112 | current->set_bytecode(Bytecode::kLogicalNot); |
| 113 | } |
| 114 | } |
| 115 | |
| 116 | bool BytecodePeepholeOptimizer::CanElideCurrent( |
| 117 | const BytecodeNode* const current) const { |
| 118 | if (Bytecodes::IsLdarOrStar(last_.bytecode()) && |
| 119 | Bytecodes::IsLdarOrStar(current->bytecode()) && |
| 120 | current->operand(0) == last_.operand(0)) { |
| 121 | // Ldar and Star make the accumulator and register hold equivalent |
| 122 | // values. Only the first bytecode is needed if there's a sequence |
| 123 | // of back-to-back Ldar and Star bytecodes with the same operand. |
| 124 | return true; |
| 125 | } else if (current->bytecode() == Bytecode::kToName && |
| 126 | LastBytecodePutsNameInAccumulator()) { |
| 127 | // If the previous bytecode ensured a name was in the accumulator, |
| 128 | // the type coercion ToName() can be elided. |
| 129 | return true; |
| 130 | } else { |
| 131 | // Additional candidates for eliding current: |
| 132 | // (i) ToNumber if the last puts a number in the accumulator. |
| 133 | return false; |
| 134 | } |
| 135 | } |
| 136 | |
| 137 | bool BytecodePeepholeOptimizer::CanElideLast( |
| 138 | const BytecodeNode* const current) const { |
| 139 | if (!last_is_discardable_) { |
| 140 | return false; |
| 141 | } |
| 142 | |
| 143 | if (last_.bytecode() == Bytecode::kNop) { |
| 144 | // Nop are placeholders for holding source position information |
| 145 | // and can be elided. |
| 146 | return true; |
| 147 | } else if (Bytecodes::IsAccumulatorLoadWithoutEffects(current->bytecode()) && |
| 148 | Bytecodes::IsAccumulatorLoadWithoutEffects(last_.bytecode())) { |
| 149 | // The accumulator is invisible to the debugger. If there is a sequence of |
| 150 | // consecutive accumulator loads (that don't have side effects) then only |
| 151 | // the final load is potentially visible. |
| 152 | return true; |
| 153 | } else { |
| 154 | return false; |
| 155 | } |
| 156 | } |
| 157 | |
| 158 | BytecodeNode* BytecodePeepholeOptimizer::Optimize(BytecodeNode* current) { |
| 159 | UpdateCurrentBytecode(current); |
| 160 | |
| 161 | if (CanElideCurrent(current)) { |
| 162 | if (current->source_info().is_valid()) { |
| 163 | current->set_bytecode(Bytecode::kNop); |
| 164 | } else { |
| 165 | current = nullptr; |
| 166 | } |
| 167 | } else if (CanElideLast(current)) { |
| 168 | if (last_.source_info().is_valid()) { |
| 169 | current->source_info().Update(last_.source_info()); |
| 170 | } |
| 171 | InvalidateLast(); |
| 172 | } |
| 173 | return current; |
| 174 | } |
| 175 | |
| 176 | } // namespace interpreter |
| 177 | } // namespace internal |
| 178 | } // namespace v8 |