// Copyright 2014 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
4
5#include "src/compiler/common-operator-reducer.h"
6
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00007#include <algorithm>
8
Emily Bernierd0a1eb72015-03-24 16:35:39 -04009#include "src/compiler/common-operator.h"
Ben Murdoch4a90d5f2016-03-22 12:00:34 +000010#include "src/compiler/graph.h"
11#include "src/compiler/machine-operator.h"
12#include "src/compiler/node.h"
13#include "src/compiler/node-matchers.h"
14#include "src/compiler/node-properties.h"
Emily Bernierd0a1eb72015-03-24 16:35:39 -040015
16namespace v8 {
17namespace internal {
18namespace compiler {
19
Ben Murdoch4a90d5f2016-03-22 12:00:34 +000020namespace {
21
Ben Murdoch4a90d5f2016-03-22 12:00:34 +000022Decision DecideCondition(Node* const cond) {
23 switch (cond->opcode()) {
24 case IrOpcode::kInt32Constant: {
25 Int32Matcher mcond(cond);
26 return mcond.Value() ? Decision::kTrue : Decision::kFalse;
27 }
Ben Murdoch4a90d5f2016-03-22 12:00:34 +000028 case IrOpcode::kHeapConstant: {
29 HeapObjectMatcher mcond(cond);
30 return mcond.Value()->BooleanValue() ? Decision::kTrue : Decision::kFalse;
31 }
32 default:
33 return Decision::kUnknown;
34 }
35}
36
37} // namespace
38
39
40CommonOperatorReducer::CommonOperatorReducer(Editor* editor, Graph* graph,
41 CommonOperatorBuilder* common,
42 MachineOperatorBuilder* machine)
43 : AdvancedReducer(editor),
44 graph_(graph),
45 common_(common),
46 machine_(machine),
47 dead_(graph->NewNode(common->Dead())) {}
48
49
Emily Bernierd0a1eb72015-03-24 16:35:39 -040050Reduction CommonOperatorReducer::Reduce(Node* node) {
51 switch (node->opcode()) {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +000052 case IrOpcode::kBranch:
53 return ReduceBranch(node);
Ben Murdochda12d292016-06-02 14:46:10 +010054 case IrOpcode::kDeoptimizeIf:
55 case IrOpcode::kDeoptimizeUnless:
56 return ReduceDeoptimizeConditional(node);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +000057 case IrOpcode::kMerge:
58 return ReduceMerge(node);
Emily Bernierd0a1eb72015-03-24 16:35:39 -040059 case IrOpcode::kEffectPhi:
Ben Murdoch4a90d5f2016-03-22 12:00:34 +000060 return ReduceEffectPhi(node);
61 case IrOpcode::kPhi:
62 return ReducePhi(node);
63 case IrOpcode::kReturn:
64 return ReduceReturn(node);
65 case IrOpcode::kSelect:
66 return ReduceSelect(node);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +000067 default:
68 break;
69 }
70 return NoChange();
71}
72
73
74Reduction CommonOperatorReducer::ReduceBranch(Node* node) {
75 DCHECK_EQ(IrOpcode::kBranch, node->opcode());
76 Node* const cond = node->InputAt(0);
77 // Swap IfTrue/IfFalse on {branch} if {cond} is a BooleanNot and use the input
78 // to BooleanNot as new condition for {branch}. Note we assume that {cond} was
79 // already properly optimized before we get here (as guaranteed by the graph
80 // reduction logic).
81 if (cond->opcode() == IrOpcode::kBooleanNot) {
82 for (Node* const use : node->uses()) {
83 switch (use->opcode()) {
84 case IrOpcode::kIfTrue:
85 NodeProperties::ChangeOp(use, common()->IfFalse());
86 break;
87 case IrOpcode::kIfFalse:
88 NodeProperties::ChangeOp(use, common()->IfTrue());
89 break;
90 default:
91 UNREACHABLE();
92 }
93 }
94 // Update the condition of {branch}. No need to mark the uses for revisit,
95 // since we tell the graph reducer that the {branch} was changed and the
96 // graph reduction logic will ensure that the uses are revisited properly.
97 node->ReplaceInput(0, cond->InputAt(0));
98 // Negate the hint for {branch}.
99 NodeProperties::ChangeOp(
100 node, common()->Branch(NegateBranchHint(BranchHintOf(node->op()))));
101 return Changed(node);
102 }
103 Decision const decision = DecideCondition(cond);
104 if (decision == Decision::kUnknown) return NoChange();
105 Node* const control = node->InputAt(1);
106 for (Node* const use : node->uses()) {
107 switch (use->opcode()) {
108 case IrOpcode::kIfTrue:
109 Replace(use, (decision == Decision::kTrue) ? control : dead());
110 break;
111 case IrOpcode::kIfFalse:
112 Replace(use, (decision == Decision::kFalse) ? control : dead());
113 break;
114 default:
115 UNREACHABLE();
116 }
117 }
118 return Replace(dead());
119}
120
Ben Murdochda12d292016-06-02 14:46:10 +0100121Reduction CommonOperatorReducer::ReduceDeoptimizeConditional(Node* node) {
122 DCHECK(node->opcode() == IrOpcode::kDeoptimizeIf ||
123 node->opcode() == IrOpcode::kDeoptimizeUnless);
124 bool condition_is_true = node->opcode() == IrOpcode::kDeoptimizeUnless;
125 Node* condition = NodeProperties::GetValueInput(node, 0);
126 Node* frame_state = NodeProperties::GetValueInput(node, 1);
127 Node* effect = NodeProperties::GetEffectInput(node);
128 Node* control = NodeProperties::GetControlInput(node);
129 // Swap DeoptimizeIf/DeoptimizeUnless on {node} if {cond} is a BooleaNot
130 // and use the input to BooleanNot as new condition for {node}. Note we
131 // assume that {cond} was already properly optimized before we get here
132 // (as guaranteed by the graph reduction logic).
133 if (condition->opcode() == IrOpcode::kBooleanNot) {
134 NodeProperties::ReplaceValueInput(node, condition->InputAt(0), 0);
135 NodeProperties::ChangeOp(node, condition_is_true
136 ? common()->DeoptimizeIf()
137 : common()->DeoptimizeUnless());
138 return Changed(node);
139 }
140 Decision const decision = DecideCondition(condition);
141 if (decision == Decision::kUnknown) return NoChange();
142 if (condition_is_true == (decision == Decision::kTrue)) {
Ben Murdoch61f157c2016-09-16 13:49:30 +0100143 ReplaceWithValue(node, dead(), effect, control);
144 } else {
145 control = graph()->NewNode(common()->Deoptimize(DeoptimizeKind::kEager),
146 frame_state, effect, control);
147 // TODO(bmeurer): This should be on the AdvancedReducer somehow.
148 NodeProperties::MergeControlToEnd(graph(), common(), control);
149 Revisit(graph()->end());
Ben Murdochda12d292016-06-02 14:46:10 +0100150 }
Ben Murdochda12d292016-06-02 14:46:10 +0100151 return Replace(dead());
152}
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000153
154Reduction CommonOperatorReducer::ReduceMerge(Node* node) {
155 DCHECK_EQ(IrOpcode::kMerge, node->opcode());
156 //
157 // Check if this is a merge that belongs to an unused diamond, which means
158 // that:
159 //
160 // a) the {Merge} has no {Phi} or {EffectPhi} uses, and
161 // b) the {Merge} has two inputs, one {IfTrue} and one {IfFalse}, which are
162 // both owned by the Merge, and
163 // c) and the {IfTrue} and {IfFalse} nodes point to the same {Branch}.
164 //
165 if (node->InputCount() == 2) {
166 for (Node* const use : node->uses()) {
167 if (IrOpcode::IsPhiOpcode(use->opcode())) return NoChange();
168 }
169 Node* if_true = node->InputAt(0);
170 Node* if_false = node->InputAt(1);
171 if (if_true->opcode() != IrOpcode::kIfTrue) std::swap(if_true, if_false);
172 if (if_true->opcode() == IrOpcode::kIfTrue &&
173 if_false->opcode() == IrOpcode::kIfFalse &&
174 if_true->InputAt(0) == if_false->InputAt(0) && if_true->OwnedBy(node) &&
175 if_false->OwnedBy(node)) {
176 Node* const branch = if_true->InputAt(0);
177 DCHECK_EQ(IrOpcode::kBranch, branch->opcode());
178 DCHECK(branch->OwnedBy(if_true, if_false));
179 Node* const control = branch->InputAt(1);
180 // Mark the {branch} as {Dead}.
181 branch->TrimInputCount(0);
182 NodeProperties::ChangeOp(branch, common()->Dead());
183 return Replace(control);
184 }
185 }
186 return NoChange();
187}
188
189
190Reduction CommonOperatorReducer::ReduceEffectPhi(Node* node) {
191 DCHECK_EQ(IrOpcode::kEffectPhi, node->opcode());
192 int const input_count = node->InputCount() - 1;
193 DCHECK_LE(1, input_count);
194 Node* const merge = node->InputAt(input_count);
195 DCHECK(IrOpcode::IsMergeOpcode(merge->opcode()));
196 DCHECK_EQ(input_count, merge->InputCount());
197 Node* const effect = node->InputAt(0);
198 DCHECK_NE(node, effect);
199 for (int i = 1; i < input_count; ++i) {
200 Node* const input = node->InputAt(i);
201 if (input == node) {
202 // Ignore redundant inputs.
203 DCHECK_EQ(IrOpcode::kLoop, merge->opcode());
204 continue;
205 }
206 if (input != effect) return NoChange();
207 }
208 // We might now be able to further reduce the {merge} node.
209 Revisit(merge);
210 return Replace(effect);
211}
212
213
214Reduction CommonOperatorReducer::ReducePhi(Node* node) {
215 DCHECK_EQ(IrOpcode::kPhi, node->opcode());
216 int const input_count = node->InputCount() - 1;
217 DCHECK_LE(1, input_count);
218 Node* const merge = node->InputAt(input_count);
219 DCHECK(IrOpcode::IsMergeOpcode(merge->opcode()));
220 DCHECK_EQ(input_count, merge->InputCount());
221 if (input_count == 2) {
222 Node* vtrue = node->InputAt(0);
223 Node* vfalse = node->InputAt(1);
224 Node* if_true = merge->InputAt(0);
225 Node* if_false = merge->InputAt(1);
226 if (if_true->opcode() != IrOpcode::kIfTrue) {
227 std::swap(if_true, if_false);
228 std::swap(vtrue, vfalse);
229 }
230 if (if_true->opcode() == IrOpcode::kIfTrue &&
231 if_false->opcode() == IrOpcode::kIfFalse &&
232 if_true->InputAt(0) == if_false->InputAt(0)) {
233 Node* const branch = if_true->InputAt(0);
234 // Check that the branch is not dead already.
235 if (branch->opcode() != IrOpcode::kBranch) return NoChange();
236 Node* const cond = branch->InputAt(0);
237 if (cond->opcode() == IrOpcode::kFloat32LessThan) {
238 Float32BinopMatcher mcond(cond);
239 if (mcond.left().Is(0.0) && mcond.right().Equals(vtrue) &&
240 vfalse->opcode() == IrOpcode::kFloat32Sub) {
241 Float32BinopMatcher mvfalse(vfalse);
242 if (mvfalse.left().IsZero() && mvfalse.right().Equals(vtrue)) {
243 // We might now be able to further reduce the {merge} node.
244 Revisit(merge);
245 return Change(node, machine()->Float32Abs(), vtrue);
246 }
Emily Bernierd0a1eb72015-03-24 16:35:39 -0400247 }
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000248 if (mcond.left().Equals(vtrue) && mcond.right().Equals(vfalse) &&
249 machine()->Float32Min().IsSupported()) {
250 // We might now be able to further reduce the {merge} node.
251 Revisit(merge);
252 return Change(node, machine()->Float32Min().op(), vtrue, vfalse);
253 } else if (mcond.left().Equals(vfalse) && mcond.right().Equals(vtrue) &&
254 machine()->Float32Max().IsSupported()) {
255 // We might now be able to further reduce the {merge} node.
256 Revisit(merge);
257 return Change(node, machine()->Float32Max().op(), vtrue, vfalse);
258 }
259 } else if (cond->opcode() == IrOpcode::kFloat64LessThan) {
260 Float64BinopMatcher mcond(cond);
261 if (mcond.left().Is(0.0) && mcond.right().Equals(vtrue) &&
262 vfalse->opcode() == IrOpcode::kFloat64Sub) {
263 Float64BinopMatcher mvfalse(vfalse);
264 if (mvfalse.left().IsZero() && mvfalse.right().Equals(vtrue)) {
265 // We might now be able to further reduce the {merge} node.
266 Revisit(merge);
267 return Change(node, machine()->Float64Abs(), vtrue);
268 }
269 }
270 if (mcond.left().Equals(vtrue) && mcond.right().Equals(vfalse) &&
271 machine()->Float64Min().IsSupported()) {
272 // We might now be able to further reduce the {merge} node.
273 Revisit(merge);
274 return Change(node, machine()->Float64Min().op(), vtrue, vfalse);
275 } else if (mcond.left().Equals(vfalse) && mcond.right().Equals(vtrue) &&
276 machine()->Float64Max().IsSupported()) {
277 // We might now be able to further reduce the {merge} node.
278 Revisit(merge);
279 return Change(node, machine()->Float64Max().op(), vtrue, vfalse);
280 }
281 }
282 }
283 }
284 Node* const value = node->InputAt(0);
285 DCHECK_NE(node, value);
286 for (int i = 1; i < input_count; ++i) {
287 Node* const input = node->InputAt(i);
288 if (input == node) {
289 // Ignore redundant inputs.
290 DCHECK_EQ(IrOpcode::kLoop, merge->opcode());
291 continue;
292 }
293 if (input != value) return NoChange();
294 }
295 // We might now be able to further reduce the {merge} node.
296 Revisit(merge);
297 return Replace(value);
298}
299
300
301Reduction CommonOperatorReducer::ReduceReturn(Node* node) {
302 DCHECK_EQ(IrOpcode::kReturn, node->opcode());
303 Node* const value = node->InputAt(0);
304 Node* const effect = node->InputAt(1);
305 Node* const control = node->InputAt(2);
306 if (value->opcode() == IrOpcode::kPhi &&
307 NodeProperties::GetControlInput(value) == control &&
308 effect->opcode() == IrOpcode::kEffectPhi &&
309 NodeProperties::GetControlInput(effect) == control &&
310 control->opcode() == IrOpcode::kMerge) {
311 int const control_input_count = control->InputCount();
312 DCHECK_NE(0, control_input_count);
313 DCHECK_EQ(control_input_count, value->InputCount() - 1);
314 DCHECK_EQ(control_input_count, effect->InputCount() - 1);
315 DCHECK_EQ(IrOpcode::kEnd, graph()->end()->opcode());
316 DCHECK_NE(0, graph()->end()->InputCount());
317 for (int i = 0; i < control_input_count; ++i) {
318 // Create a new {Return} and connect it to {end}. We don't need to mark
319 // {end} as revisit, because we mark {node} as {Dead} below, which was
320 // previously connected to {end}, so we know for sure that at some point
321 // the reducer logic will visit {end} again.
322 Node* ret = graph()->NewNode(common()->Return(), value->InputAt(i),
323 effect->InputAt(i), control->InputAt(i));
324 NodeProperties::MergeControlToEnd(graph(), common(), ret);
325 }
326 // Mark the merge {control} and return {node} as {dead}.
327 Replace(control, dead());
328 return Replace(dead());
329 }
330 return NoChange();
331}
332
333
334Reduction CommonOperatorReducer::ReduceSelect(Node* node) {
335 DCHECK_EQ(IrOpcode::kSelect, node->opcode());
336 Node* const cond = node->InputAt(0);
337 Node* const vtrue = node->InputAt(1);
338 Node* const vfalse = node->InputAt(2);
339 if (vtrue == vfalse) return Replace(vtrue);
340 switch (DecideCondition(cond)) {
341 case Decision::kTrue:
342 return Replace(vtrue);
343 case Decision::kFalse:
344 return Replace(vfalse);
345 case Decision::kUnknown:
346 break;
347 }
348 switch (cond->opcode()) {
349 case IrOpcode::kFloat32LessThan: {
350 Float32BinopMatcher mcond(cond);
351 if (mcond.left().Is(0.0) && mcond.right().Equals(vtrue) &&
352 vfalse->opcode() == IrOpcode::kFloat32Sub) {
353 Float32BinopMatcher mvfalse(vfalse);
354 if (mvfalse.left().IsZero() && mvfalse.right().Equals(vtrue)) {
355 return Change(node, machine()->Float32Abs(), vtrue);
356 }
357 }
358 if (mcond.left().Equals(vtrue) && mcond.right().Equals(vfalse) &&
359 machine()->Float32Min().IsSupported()) {
360 return Change(node, machine()->Float32Min().op(), vtrue, vfalse);
361 } else if (mcond.left().Equals(vfalse) && mcond.right().Equals(vtrue) &&
362 machine()->Float32Max().IsSupported()) {
363 return Change(node, machine()->Float32Max().op(), vtrue, vfalse);
Emily Bernierd0a1eb72015-03-24 16:35:39 -0400364 }
365 break;
366 }
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000367 case IrOpcode::kFloat64LessThan: {
368 Float64BinopMatcher mcond(cond);
369 if (mcond.left().Is(0.0) && mcond.right().Equals(vtrue) &&
370 vfalse->opcode() == IrOpcode::kFloat64Sub) {
371 Float64BinopMatcher mvfalse(vfalse);
372 if (mvfalse.left().IsZero() && mvfalse.right().Equals(vtrue)) {
373 return Change(node, machine()->Float64Abs(), vtrue);
374 }
375 }
376 if (mcond.left().Equals(vtrue) && mcond.right().Equals(vfalse) &&
377 machine()->Float64Min().IsSupported()) {
378 return Change(node, machine()->Float64Min().op(), vtrue, vfalse);
379 } else if (mcond.left().Equals(vfalse) && mcond.right().Equals(vtrue) &&
380 machine()->Float64Max().IsSupported()) {
381 return Change(node, machine()->Float64Max().op(), vtrue, vfalse);
Emily Bernierd0a1eb72015-03-24 16:35:39 -0400382 }
383 break;
384 }
385 default:
386 break;
387 }
388 return NoChange();
389}
390
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000391
// In-place rewrite of {node} into the unary operator {op} with input {a}.
// The input is replaced and trimmed to arity 1 before the operator swap.
Reduction CommonOperatorReducer::Change(Node* node, Operator const* op,
                                        Node* a) {
  node->ReplaceInput(0, a);
  node->TrimInputCount(1);
  NodeProperties::ChangeOp(node, op);
  return Changed(node);
}
399
400
// In-place rewrite of {node} into the binary operator {op} with inputs
// {a} and {b}. Inputs are replaced and trimmed to arity 2 before the
// operator swap.
Reduction CommonOperatorReducer::Change(Node* node, Operator const* op, Node* a,
                                        Node* b) {
  node->ReplaceInput(0, a);
  node->ReplaceInput(1, b);
  node->TrimInputCount(2);
  NodeProperties::ChangeOp(node, op);
  return Changed(node);
}
409
Emily Bernierd0a1eb72015-03-24 16:35:39 -0400410} // namespace compiler
411} // namespace internal
412} // namespace v8