// Copyright 2015 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/interpreter/bytecode-peephole-optimizer.h"

#include "src/interpreter/constant-array-builder.h"
#include "src/objects-inl.h"
#include "src/objects.h"

namespace v8 {
namespace internal {
namespace interpreter {

BytecodePeepholeOptimizer::BytecodePeepholeOptimizer(
    ConstantArrayBuilder* constant_array_builder,
    BytecodePipelineStage* next_stage)
    : constant_array_builder_(constant_array_builder), next_stage_(next_stage) {
  InvalidateLast();
}

// override
Handle<BytecodeArray> BytecodePeepholeOptimizer::ToBytecodeArray(
    int fixed_register_count, int parameter_count,
    Handle<FixedArray> handler_table) {
  Flush();
  return next_stage_->ToBytecodeArray(fixed_register_count, parameter_count,
                                      handler_table);
}

// override
void BytecodePeepholeOptimizer::Write(BytecodeNode* node) {
  node = OptimizeAndEmitLast(node);
  if (node != nullptr) {
    SetLast(node);
  }
}

// override
void BytecodePeepholeOptimizer::WriteJump(BytecodeNode* node,
                                          BytecodeLabel* label) {
  node = OptimizeAndEmitLast(node);
  next_stage_->WriteJump(node, label);
}

// override
void BytecodePeepholeOptimizer::BindLabel(BytecodeLabel* label) {
  Flush();
  next_stage_->BindLabel(label);
}

// override
void BytecodePeepholeOptimizer::BindLabel(const BytecodeLabel& target,
                                          BytecodeLabel* label) {
  // There is no need to flush here; it will have been flushed when |target|
  // was bound.
  next_stage_->BindLabel(target, label);
}

void BytecodePeepholeOptimizer::Flush() {
  // TODO(oth/rmcilroy): We could check CanElideLast() here to potentially
  // eliminate last rather than writing it.
  if (LastIsValid()) {
    next_stage_->Write(&last_);
    InvalidateLast();
  }
}

void BytecodePeepholeOptimizer::InvalidateLast() {
  last_.set_bytecode(Bytecode::kIllegal);
}

bool BytecodePeepholeOptimizer::LastIsValid() const {
  return last_.bytecode() != Bytecode::kIllegal;
}

void BytecodePeepholeOptimizer::SetLast(const BytecodeNode* const node) {
  last_.Clone(node);
}

Handle<Object> BytecodePeepholeOptimizer::GetConstantForIndexOperand(
    const BytecodeNode* const node, int index) const {
  DCHECK_LT(index, node->operand_count());
  DCHECK_EQ(Bytecodes::GetOperandType(node->bytecode(), index),
            OperandType::kIdx);
  uint32_t index_operand = node->operand(index);
  return constant_array_builder_->At(index_operand);
}

bool BytecodePeepholeOptimizer::LastBytecodePutsNameInAccumulator() const {
  DCHECK(LastIsValid());
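  // TypeOf always leaves an interned type-name string (a Name) in the
  // accumulator, ToName leaves a Name by definition, and LdaConstant does so
  // when the constant being loaded is itself a Name.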
  return (last_.bytecode() == Bytecode::kTypeOf ||
          last_.bytecode() == Bytecode::kToName ||
          (last_.bytecode() == Bytecode::kLdaConstant &&
           GetConstantForIndexOperand(&last_, 0)->IsName()));
}

void BytecodePeepholeOptimizer::TryToRemoveLastExpressionPosition(
    const BytecodeNode* const current) {
  if (current->source_info().is_valid() &&
      last_.source_info().is_expression() &&
      Bytecodes::IsWithoutExternalSideEffects(last_.bytecode())) {
    // The last bytecode is marked with an expression position. It has no
    // external side effects, so it cannot throw, and the current bytecode
    // carries a source position of its own. Remove the expression position
    // on the last bytecode to open up potential peephole optimizations and
    // to save the memory and perf cost of storing the unneeded expression
    // position.
    last_.source_info().set_invalid();
  }
}

bool BytecodePeepholeOptimizer::CanElideCurrent(
    const BytecodeNode* const current) const {
  if (Bytecodes::IsLdarOrStar(last_.bytecode()) &&
      Bytecodes::IsLdarOrStar(current->bytecode()) &&
      current->operand(0) == last_.operand(0)) {
    // Ldar and Star make the accumulator and register hold equivalent
    // values. Only the first bytecode is needed if there's a sequence
    // of back-to-back Ldar and Star bytecodes with the same operand.
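    // For example, in the sequence "Star r0; Ldar r0" the Ldar is redundant
    // and the current bytecode can be dropped.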
    return true;
  } else if (current->bytecode() == Bytecode::kToName &&
             LastBytecodePutsNameInAccumulator()) {
    // If the previous bytecode ensured a name was in the accumulator,
    // the type coercion ToName() can be elided.
    return true;
  } else {
    // Additional candidates for eliding current:
    // (i) ToNumber if the last puts a number in the accumulator.
    return false;
  }
}

bool BytecodePeepholeOptimizer::CanElideLastBasedOnSourcePosition(
    const BytecodeNode* const current) const {
  //
  // The rules for allowing the elision of the last bytecode based
  // on source position are:
  //
  //                     C U R R E N T
  //              +--------+--------+--------+
  //              |  None  |  Expr  |  Stmt  |
  //  L  +--------+--------+--------+--------+
  //     |  None  |  YES   |  YES   |  YES   |
  //  A  +--------+--------+--------+--------+
  //     |  Expr  |  YES   | MAYBE  | MAYBE  |
  //  S  +--------+--------+--------+--------+
  //     |  Stmt  |  YES   |   NO   |   NO   |
  //  T  +--------+--------+--------+--------+
  //
  // The goal is to not lose any statement positions and to not lose
  // useful expression positions. Whenever the last bytecode is elided,
  // its source position information is applied to the current node,
  // updating it if necessary.
  //
  // The last bytecode can be elided for the MAYBE cases if the last
  // bytecode is known not to throw. If it throws, the system would
  // not have correct stack trace information. The appropriate check
  // for this would be Bytecodes::IsWithoutExternalSideEffects(),
  // which is checked in
  // BytecodePeepholeOptimizer::TransformLastAndCurrentBytecodes() to
  // keep the check here simple.
  //
  // In rare cases, bytecode generation produces consecutive bytecodes
  // with the same expression positions. In principle, the latter of
  // these can be elided, but doing so would make this function more
  // expensive.
  //
  return (!last_.source_info().is_valid() ||
          !current->source_info().is_valid());
}

namespace {

void TransformLdaStarToLdrLdar(Bytecode new_bytecode, BytecodeNode* const last,
                               BytecodeNode* const current) {
  DCHECK_EQ(current->bytecode(), Bytecode::kStar);

  //
  // An example transformation here would be:
  //
  //   LdaGlobal i0, i1  ____\  LdrGlobal i0, i1, R
  //   Star R            ====/  Ldar R
  //
  // which loads a global value into both a register and the
  // accumulator. However, in the second form the Ldar can often be
  // peephole optimized away unlike the Star in the first form.
  //
  last->Transform(new_bytecode, current->operand(0));
  current->set_bytecode(Bytecode::kLdar, current->operand(0));
}

}  // namespace

bool BytecodePeepholeOptimizer::TransformLastAndCurrentBytecodes(
    BytecodeNode* const current) {
  if (current->bytecode() == Bytecode::kStar &&
      !current->source_info().is_statement()) {
    // Note: If the Star is tagged with a statement position, we can't
    // perform this transform as the store to the register will
    // have the wrong ordering for stepping in the debugger.
    switch (last_.bytecode()) {
      case Bytecode::kLdaNamedProperty:
        TransformLdaStarToLdrLdar(Bytecode::kLdrNamedProperty, &last_, current);
        return true;
      case Bytecode::kLdaKeyedProperty:
        TransformLdaStarToLdrLdar(Bytecode::kLdrKeyedProperty, &last_, current);
        return true;
      case Bytecode::kLdaGlobal:
        TransformLdaStarToLdrLdar(Bytecode::kLdrGlobal, &last_, current);
        return true;
      case Bytecode::kLdaContextSlot:
        TransformLdaStarToLdrLdar(Bytecode::kLdrContextSlot, &last_, current);
        return true;
      case Bytecode::kLdaUndefined:
        TransformLdaStarToLdrLdar(Bytecode::kLdrUndefined, &last_, current);
        return true;
      default:
        break;
    }
  }
  return false;
}

bool BytecodePeepholeOptimizer::RemoveToBooleanFromJump(
    BytecodeNode* const current) {
  bool can_remove = Bytecodes::IsJumpIfToBoolean(current->bytecode()) &&
                    Bytecodes::WritesBooleanToAccumulator(last_.bytecode());
  if (can_remove) {
    // Conditional jumps with boolean conditions are emitted in
    // ToBoolean form by the bytecode array builder,
    // i.e. JumpIfToBooleanTrue rather than JumpIfTrue. The ToBoolean
    // element can be removed if the previous bytecode put a boolean
    // value in the accumulator.
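    // For example, a comparison such as TestEqual followed by
    // JumpIfToBooleanTrue can be rewritten to use the plain JumpIfTrue.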
    Bytecode jump = Bytecodes::GetJumpWithoutToBoolean(current->bytecode());
    current->set_bytecode(jump, current->operand(0));
  }
  return can_remove;
}

bool BytecodePeepholeOptimizer::RemoveToBooleanFromLogicalNot(
    BytecodeNode* const current) {
  bool can_remove = current->bytecode() == Bytecode::kToBooleanLogicalNot &&
                    Bytecodes::WritesBooleanToAccumulator(last_.bytecode());
  if (can_remove) {
    // Logical-nots are emitted in ToBoolean form by the bytecode array
    // builder. The ToBoolean element can be removed if the previous bytecode
    // put a boolean value in the accumulator.
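    // For example, TestEqual followed by ToBooleanLogicalNot can be
    // rewritten to use the plain LogicalNot.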
    current->set_bytecode(Bytecode::kLogicalNot);
  }
  return can_remove;
}

bool BytecodePeepholeOptimizer::TransformCurrentBytecode(
    BytecodeNode* const current) {
  return RemoveToBooleanFromJump(current) ||
         RemoveToBooleanFromLogicalNot(current);
}

bool BytecodePeepholeOptimizer::CanElideLast(
    const BytecodeNode* const current) const {
  if (last_.bytecode() == Bytecode::kNop) {
    // Nops are placeholders for holding source position information.
    return true;
  } else if (Bytecodes::IsAccumulatorLoadWithoutEffects(current->bytecode()) &&
             Bytecodes::IsAccumulatorLoadWithoutEffects(last_.bytecode())) {
    // The accumulator is invisible to the debugger. If there is a sequence of
    // consecutive accumulator loads (that don't have side effects) then only
    // the final load is potentially visible.
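    // For example, in "LdaTrue; LdaZero" only the LdaZero is observable, so
    // the preceding LdaTrue can be elided.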
    return true;
  } else if (Bytecodes::GetAccumulatorUse(current->bytecode()) ==
                 AccumulatorUse::kWrite &&
             Bytecodes::IsAccumulatorLoadWithoutEffects(last_.bytecode())) {
    // The current instruction clobbers the accumulator without reading it. The
    // load in the last instruction can be elided as it has no effect.
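    // For example, in "LdaUndefined; LdaNamedProperty r0, ..." the property
    // load overwrites the accumulator without reading it, so the LdaUndefined
    // is dead and can be elided.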
    return true;
  } else {
    return false;
  }
}

BytecodeNode* BytecodePeepholeOptimizer::Optimize(BytecodeNode* current) {
  TryToRemoveLastExpressionPosition(current);

  if (TransformCurrentBytecode(current) ||
      TransformLastAndCurrentBytecodes(current)) {
    return current;
  }

  if (CanElideCurrent(current)) {
    if (current->source_info().is_valid()) {
      // Preserve the source information by replacing the current bytecode
      // with a nop bytecode.
      current->set_bytecode(Bytecode::kNop);
    } else {
      current = nullptr;
    }
    return current;
  }

  if (CanElideLast(current) && CanElideLastBasedOnSourcePosition(current)) {
    if (last_.source_info().is_valid()) {
      // Current cannot have a valid source position here, per
      // CanElideLastBasedOnSourcePosition(), so it is safe to overwrite it
      // with last's position.
      current->source_info().Clone(last_.source_info());
    }
    InvalidateLast();
    return current;
  }

  return current;
}

BytecodeNode* BytecodePeepholeOptimizer::OptimizeAndEmitLast(
    BytecodeNode* current) {
  // Attempt optimization if there is an earlier node to optimize with.
  if (LastIsValid()) {
    current = Optimize(current);
    // Only output the last node if it wasn't invalidated by the optimization.
    if (LastIsValid()) {
      next_stage_->Write(&last_);
      InvalidateLast();
    }
  }
  return current;
}

}  // namespace interpreter
}  // namespace internal
}  // namespace v8