// Copyright 2014 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/compiler/move-optimizer.h"
#include "test/unittests/compiler/instruction-sequence-unittest.h"

namespace v8 {
namespace internal {
namespace compiler {

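// Test fixture that builds small instruction sequences by hand, runs the
// MoveOptimizer over them, and inspects the resulting parallel moves.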
class MoveOptimizerTest : public InstructionSequenceTest {
 public:
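  // Returns the last gap instruction in the sequence; if the final
  // instruction is not a gap, falls back to the one just before it.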
  GapInstruction* LastGap() {
    auto instruction = sequence()->instructions().back();
    if (!instruction->IsGapMoves()) {
      instruction = *(sequence()->instructions().rbegin() + 1);
    }
    return GapInstruction::cast(instruction);
  }

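  // Adds a move from |from| to |to| to the parallel move at position |pos|
  // of the given gap instruction.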
  void AddMove(GapInstruction* gap, TestOperand from, TestOperand to,
               GapInstruction::InnerPosition pos = GapInstruction::START) {
    auto parallel_move = gap->GetOrCreateParallelMove(pos, zone());
    parallel_move->AddMove(ConvertMoveArg(from), ConvertMoveArg(to), zone());
  }

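  // Counts the non-redundant moves in |move|.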
  int NonRedundantSize(ParallelMove* move) {
    int i = 0;
    auto ops = move->move_operands();
    for (auto op = ops->begin(); op != ops->end(); ++op) {
      if (op->IsRedundant()) continue;
      i++;
    }
    return i;
  }

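  // Returns true if |move| contains a non-redundant move from |from_op|
  // to |to_op|.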
  bool Contains(ParallelMove* move, TestOperand from_op, TestOperand to_op) {
    auto from = ConvertMoveArg(from_op);
    auto to = ConvertMoveArg(to_op);
    auto ops = move->move_operands();
    for (auto op = ops->begin(); op != ops->end(); ++op) {
      if (op->IsRedundant()) continue;
      if (op->source()->Equals(from) && op->destination()->Equals(to)) {
        return true;
      }
    }
    return false;
  }

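  // Wires up the blocks and runs the MoveOptimizer over the sequence,
  // dumping the sequence before and after when --trace-turbo is set.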
  // TODO(dcarney): add a verifier.
  void Optimize() {
    WireBlocks();
    if (FLAG_trace_turbo) {
      OFStream os(stdout);
      PrintableInstructionSequence printable = {config(), sequence()};
      os << "----- Instruction sequence before move optimization -----\n"
         << printable;
    }
    MoveOptimizer move_optimizer(zone(), sequence());
    move_optimizer.Run();
    if (FLAG_trace_turbo) {
      OFStream os(stdout);
      PrintableInstructionSequence printable = {config(), sequence()};
      os << "----- Instruction sequence after move optimization -----\n"
         << printable;
    }
  }

 private:
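  // Translates a TestOperand into the corresponding InstructionOperand
  // (constant, stack slot, or fixed register).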
  InstructionOperand* ConvertMoveArg(TestOperand op) {
    CHECK_EQ(kNoValue, op.vreg_.value_);
    CHECK_NE(kNoValue, op.value_);
    switch (op.type_) {
      case kConstant:
        return ConstantOperand::Create(op.value_, zone());
      case kFixedSlot:
        return StackSlotOperand::Create(op.value_, zone());
      case kFixedRegister:
        CHECK(0 <= op.value_ && op.value_ < num_general_registers());
        return RegisterOperand::Create(op.value_, zone());
      default:
        break;
    }
    CHECK(false);
    return nullptr;
  }
};


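// A register move followed later by its inverse is folded away: only the
// original move Reg(0) -> Reg(1) should survive optimization.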
TEST_F(MoveOptimizerTest, RemovesRedundant) {
  StartBlock();
  AddMove(LastGap(), Reg(0), Reg(1));
  EmitNop();
  AddMove(LastGap(), Reg(1), Reg(0));
  EmitNop();
  EndBlock(Last());

  Optimize();

  auto gap = LastGap();
  auto move = gap->parallel_moves()[0];
  CHECK_EQ(1, NonRedundantSize(move));
  CHECK(Contains(move, Reg(0), Reg(1)));
}


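// Several moves of the same constant should be split so that the constant is
// materialized into a register once (first parallel move) and the stack slots
// are then filled from that register (second parallel move).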
TEST_F(MoveOptimizerTest, SplitsConstants) {
  StartBlock();
  EndBlock(Last());

  auto gap = LastGap();
  AddMove(gap, Const(1), Slot(0));
  AddMove(gap, Const(1), Slot(1));
  AddMove(gap, Const(1), Reg(0));
  AddMove(gap, Const(1), Slot(2));

  Optimize();

  auto move = gap->parallel_moves()[0];
  CHECK_EQ(1, NonRedundantSize(move));
  CHECK(Contains(move, Const(1), Reg(0)));

  move = gap->parallel_moves()[1];
  CHECK_EQ(3, NonRedundantSize(move));
  CHECK(Contains(move, Reg(0), Slot(0)));
  CHECK(Contains(move, Reg(0), Slot(1)));
  CHECK(Contains(move, Reg(0), Slot(2)));
}

}  // namespace compiler
}  // namespace internal
}  // namespace v8