blob: 5ccd0c67277ba2d7197f639c66f533a6b6b5382f [file] [log] [blame]
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001// Copyright 2014 the V8 project authors. All rights reserved.
2// Use of this source code is governed by a BSD-style license that can be
3// found in the LICENSE file.
4
5#include "src/compiler/move-optimizer.h"
Ben Murdoch097c5b22016-05-18 11:27:45 +01006#include "src/compiler/pipeline.h"
Emily Bernierd0a1eb72015-03-24 16:35:39 -04007#include "test/unittests/compiler/instruction-sequence-unittest.h"
8
9namespace v8 {
10namespace internal {
11namespace compiler {
12
// Test fixture for the TurboFan move optimizer. Builds an instruction
// sequence via InstructionSequenceTest helpers, attaches parallel moves to
// gap positions, runs the optimizer, and inspects the surviving moves.
class MoveOptimizerTest : public InstructionSequenceTest {
 public:
  // Returns the most recently emitted instruction of the sequence.
  Instruction* LastInstruction() { return sequence()->instructions().back(); }

  // Adds a move `from` -> `to` to the parallel move of `instr` at gap
  // position `pos` (START by default), creating the parallel move if needed.
  void AddMove(Instruction* instr, TestOperand from, TestOperand to,
               Instruction::GapPosition pos = Instruction::START) {
    auto parallel_move = instr->GetOrCreateParallelMove(pos, zone());
    parallel_move->AddMove(ConvertMoveArg(from), ConvertMoveArg(to));
  }

  // Counts the moves in `moves` that are not redundant (i.e. that the
  // optimizer did not eliminate).
  int NonRedundantSize(ParallelMove* moves) {
    int i = 0;
    for (auto move : *moves) {
      if (move->IsRedundant()) continue;
      i++;
    }
    return i;
  }

  // Returns true if `moves` contains a non-redundant move whose source and
  // destination equal `from_op` and `to_op` respectively.
  bool Contains(ParallelMove* moves, TestOperand from_op, TestOperand to_op) {
    auto from = ConvertMoveArg(from_op);
    auto to = ConvertMoveArg(to_op);
    for (auto move : *moves) {
      if (move->IsRedundant()) continue;
      if (move->source().Equals(from) && move->destination().Equals(to)) {
        return true;
      }
    }
    return false;
  }

  // TODO(dcarney): add a verifier.
  // Wires up the block graph and runs the move optimizer over the sequence,
  // optionally dumping the sequence before/after when --trace-turbo is set.
  void Optimize() {
    WireBlocks();
    if (FLAG_trace_turbo) {
      OFStream os(stdout);
      PrintableInstructionSequence printable = {config(), sequence()};
      os << "----- Instruction sequence before move optimization -----\n"
         << printable;
    }
    MoveOptimizer move_optimizer(zone(), sequence());
    move_optimizer.Run();
    if (FLAG_trace_turbo) {
      OFStream os(stdout);
      PrintableInstructionSequence printable = {config(), sequence()};
      os << "----- Instruction sequence after move optimization -----\n"
         << printable;
    }
  }

 private:
  // Translates a TestOperand into a real InstructionOperand. All register
  // and slot operands are created with kWord32 representation.
  InstructionOperand ConvertMoveArg(TestOperand op) {
    CHECK_EQ(kNoValue, op.vreg_.value_);
    CHECK_NE(kNoValue, op.value_);
    switch (op.type_) {
      case kConstant:
        return ConstantOperand(op.value_);
      case kFixedSlot:
        return AllocatedOperand(LocationOperand::STACK_SLOT,
                                MachineRepresentation::kWord32, op.value_);
      case kFixedRegister:
        CHECK(0 <= op.value_ && op.value_ < num_general_registers());
        return AllocatedOperand(LocationOperand::REGISTER,
                                MachineRepresentation::kWord32, op.value_);
      case kExplicit:
        CHECK(0 <= op.value_ && op.value_ < num_general_registers());
        return ExplicitOperand(LocationOperand::REGISTER,
                               MachineRepresentation::kWord32, op.value_);
      default:
        break;
    }
    // Unsupported operand kinds are a test bug.
    CHECK(false);
    return InstructionOperand();
  }
};
88
89
90TEST_F(MoveOptimizerTest, RemovesRedundant) {
91 StartBlock();
Ben Murdoch4a90d5f2016-03-22 12:00:34 +000092 auto first_instr = EmitNop();
93 AddMove(first_instr, Reg(0), Reg(1));
94 auto last_instr = EmitNop();
95 AddMove(last_instr, Reg(1), Reg(0));
Emily Bernierd0a1eb72015-03-24 16:35:39 -040096 EndBlock(Last());
97
98 Optimize();
99
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000100 CHECK_EQ(0, NonRedundantSize(first_instr->parallel_moves()[0]));
101 auto move = last_instr->parallel_moves()[0];
Emily Bernierd0a1eb72015-03-24 16:35:39 -0400102 CHECK_EQ(1, NonRedundantSize(move));
103 CHECK(Contains(move, Reg(0), Reg(1)));
104}
105
106
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000107TEST_F(MoveOptimizerTest, RemovesRedundantExplicit) {
108 int first_reg_index =
109 RegisterConfiguration::ArchDefault(RegisterConfiguration::TURBOFAN)
110 ->GetAllocatableGeneralCode(0);
111 int second_reg_index =
112 RegisterConfiguration::ArchDefault(RegisterConfiguration::TURBOFAN)
113 ->GetAllocatableGeneralCode(1);
114
115 StartBlock();
116 auto first_instr = EmitNop();
117 AddMove(first_instr, Reg(first_reg_index), ExplicitReg(second_reg_index));
118 auto last_instr = EmitNop();
119 AddMove(last_instr, Reg(second_reg_index), Reg(first_reg_index));
120 EndBlock(Last());
121
122 Optimize();
123
124 CHECK_EQ(0, NonRedundantSize(first_instr->parallel_moves()[0]));
125 auto move = last_instr->parallel_moves()[0];
126 CHECK_EQ(1, NonRedundantSize(move));
127 CHECK(Contains(move, Reg(first_reg_index), ExplicitReg(second_reg_index)));
128}
129
130
Emily Bernierd0a1eb72015-03-24 16:35:39 -0400131TEST_F(MoveOptimizerTest, SplitsConstants) {
132 StartBlock();
133 EndBlock(Last());
134
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000135 auto gap = LastInstruction();
Emily Bernierd0a1eb72015-03-24 16:35:39 -0400136 AddMove(gap, Const(1), Slot(0));
137 AddMove(gap, Const(1), Slot(1));
138 AddMove(gap, Const(1), Reg(0));
139 AddMove(gap, Const(1), Slot(2));
140
141 Optimize();
142
143 auto move = gap->parallel_moves()[0];
144 CHECK_EQ(1, NonRedundantSize(move));
145 CHECK(Contains(move, Const(1), Reg(0)));
146
147 move = gap->parallel_moves()[1];
148 CHECK_EQ(3, NonRedundantSize(move));
149 CHECK(Contains(move, Reg(0), Slot(0)));
150 CHECK(Contains(move, Reg(0), Slot(1)));
151 CHECK(Contains(move, Reg(0), Slot(2)));
152}
153
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000154
155TEST_F(MoveOptimizerTest, SimpleMerge) {
156 StartBlock();
157 EndBlock(Branch(Imm(), 1, 2));
158
159 StartBlock();
160 EndBlock(Jump(2));
161 AddMove(LastInstruction(), Reg(0), Reg(1));
162
163 StartBlock();
164 EndBlock(Jump(1));
165 AddMove(LastInstruction(), Reg(0), Reg(1));
166
167 StartBlock();
168 EndBlock(Last());
169
170 auto last = LastInstruction();
171
172 Optimize();
173
174 auto move = last->parallel_moves()[0];
175 CHECK_EQ(1, NonRedundantSize(move));
176 CHECK(Contains(move, Reg(0), Reg(1)));
177}
178
179
180TEST_F(MoveOptimizerTest, SimpleMergeCycle) {
181 StartBlock();
182 EndBlock(Branch(Imm(), 1, 2));
183
184 StartBlock();
185 EndBlock(Jump(2));
186 auto gap_0 = LastInstruction();
187 AddMove(gap_0, Reg(0), Reg(1));
188 AddMove(LastInstruction(), Reg(1), Reg(0));
189
190 StartBlock();
191 EndBlock(Jump(1));
192 auto gap_1 = LastInstruction();
193 AddMove(gap_1, Reg(0), Reg(1));
194 AddMove(gap_1, Reg(1), Reg(0));
195
196 StartBlock();
197 EndBlock(Last());
198
199 auto last = LastInstruction();
200
201 Optimize();
202
203 CHECK(gap_0->AreMovesRedundant());
204 CHECK(gap_1->AreMovesRedundant());
205 auto move = last->parallel_moves()[0];
206 CHECK_EQ(2, NonRedundantSize(move));
207 CHECK(Contains(move, Reg(0), Reg(1)));
208 CHECK(Contains(move, Reg(1), Reg(0)));
209}
210
211
212TEST_F(MoveOptimizerTest, GapsCanMoveOverInstruction) {
213 StartBlock();
214 int const_index = 1;
215 DefineConstant(const_index);
216 Instruction* ctant_def = LastInstruction();
217 AddMove(ctant_def, Reg(1), Reg(0));
218
219 Instruction* last = EmitNop();
220 AddMove(last, Const(const_index), Reg(0));
221 AddMove(last, Reg(0), Reg(1));
222 EndBlock(Last());
223 Optimize();
224
225 ParallelMove* inst1_start =
226 ctant_def->GetParallelMove(Instruction::GapPosition::START);
227 ParallelMove* inst1_end =
228 ctant_def->GetParallelMove(Instruction::GapPosition::END);
229 ParallelMove* last_start =
230 last->GetParallelMove(Instruction::GapPosition::START);
Ben Murdoch097c5b22016-05-18 11:27:45 +0100231 CHECK(inst1_start == nullptr || NonRedundantSize(inst1_start) == 0);
232 CHECK(inst1_end == nullptr || NonRedundantSize(inst1_end) == 0);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000233 CHECK(last_start->size() == 2);
234 int redundants = 0;
235 int assignment = 0;
236 for (MoveOperands* move : *last_start) {
237 if (move->IsRedundant()) {
238 ++redundants;
239 } else {
240 ++assignment;
241 CHECK(move->destination().IsRegister());
242 CHECK(move->source().IsConstant());
243 }
244 }
245 CHECK_EQ(1, redundants);
246 CHECK_EQ(1, assignment);
247}
248
249
Ben Murdoch097c5b22016-05-18 11:27:45 +0100250TEST_F(MoveOptimizerTest, SubsetMovesMerge) {
251 StartBlock();
252 EndBlock(Branch(Imm(), 1, 2));
253
254 StartBlock();
255 EndBlock(Jump(2));
256 Instruction* last_move_b1 = LastInstruction();
257 AddMove(last_move_b1, Reg(0), Reg(1));
258 AddMove(last_move_b1, Reg(2), Reg(3));
259
260 StartBlock();
261 EndBlock(Jump(1));
262 Instruction* last_move_b2 = LastInstruction();
263 AddMove(last_move_b2, Reg(0), Reg(1));
264 AddMove(last_move_b2, Reg(4), Reg(5));
265
266 StartBlock();
267 EndBlock(Last());
268
269 Instruction* last = LastInstruction();
270
271 Optimize();
272
273 ParallelMove* last_move = last->parallel_moves()[0];
274 CHECK_EQ(1, NonRedundantSize(last_move));
275 CHECK(Contains(last_move, Reg(0), Reg(1)));
276
277 ParallelMove* b1_move = last_move_b1->parallel_moves()[0];
278 CHECK_EQ(1, NonRedundantSize(b1_move));
279 CHECK(Contains(b1_move, Reg(2), Reg(3)));
280
281 ParallelMove* b2_move = last_move_b2->parallel_moves()[0];
282 CHECK_EQ(1, NonRedundantSize(b2_move));
283 CHECK(Contains(b2_move, Reg(4), Reg(5)));
284}
285
286
287TEST_F(MoveOptimizerTest, GapConflictSubsetMovesDoNotMerge) {
288 StartBlock();
289 EndBlock(Branch(Imm(), 1, 2));
290
291 StartBlock();
292 EndBlock(Jump(2));
293 Instruction* last_move_b1 = LastInstruction();
294 AddMove(last_move_b1, Reg(0), Reg(1));
295 AddMove(last_move_b1, Reg(2), Reg(0));
296 AddMove(last_move_b1, Reg(4), Reg(5));
297
298 StartBlock();
299 EndBlock(Jump(1));
300 Instruction* last_move_b2 = LastInstruction();
301 AddMove(last_move_b2, Reg(0), Reg(1));
302 AddMove(last_move_b2, Reg(4), Reg(5));
303
304 StartBlock();
305 EndBlock(Last());
306
307 Instruction* last = LastInstruction();
308
309 Optimize();
310
311 ParallelMove* last_move = last->parallel_moves()[0];
312 CHECK_EQ(1, NonRedundantSize(last_move));
313 CHECK(Contains(last_move, Reg(4), Reg(5)));
314
315 ParallelMove* b1_move = last_move_b1->parallel_moves()[0];
316 CHECK_EQ(2, NonRedundantSize(b1_move));
317 CHECK(Contains(b1_move, Reg(0), Reg(1)));
318 CHECK(Contains(b1_move, Reg(2), Reg(0)));
319
320 ParallelMove* b2_move = last_move_b2->parallel_moves()[0];
321 CHECK_EQ(1, NonRedundantSize(b2_move));
322 CHECK(Contains(b1_move, Reg(0), Reg(1)));
323}
324
325TEST_F(MoveOptimizerTest, ClobberedDestinationsAreEliminated) {
326 StartBlock();
327 EmitNop();
328 Instruction* first_instr = LastInstruction();
329 AddMove(first_instr, Reg(0), Reg(1));
330 EmitOI(Reg(1), 0, nullptr);
331 Instruction* last_instr = LastInstruction();
332 EndBlock();
333 Optimize();
334
335 ParallelMove* first_move = first_instr->parallel_moves()[0];
336 CHECK_EQ(0, NonRedundantSize(first_move));
337
338 ParallelMove* last_move = last_instr->parallel_moves()[0];
339 CHECK_EQ(0, NonRedundantSize(last_move));
340}
341
Emily Bernierd0a1eb72015-03-24 16:35:39 -0400342} // namespace compiler
343} // namespace internal
344} // namespace v8