// Copyright 2014 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/compiler/pipeline.h"

#include <fstream>  // NOLINT(readability/streams)
#include <sstream>

#include "src/base/platform/elapsed-timer.h"
#include "src/compiler/ast-graph-builder.h"
#include "src/compiler/ast-loop-assignment-analyzer.h"
#include "src/compiler/basic-block-instrumentor.h"
#include "src/compiler/change-lowering.h"
#include "src/compiler/code-generator.h"
#include "src/compiler/common-operator-reducer.h"
#include "src/compiler/control-reducer.h"
#include "src/compiler/graph-replay.h"
#include "src/compiler/graph-visualizer.h"
#include "src/compiler/instruction.h"
#include "src/compiler/instruction-selector.h"
#include "src/compiler/js-builtin-reducer.h"
#include "src/compiler/js-context-specialization.h"
#include "src/compiler/js-generic-lowering.h"
#include "src/compiler/js-inlining.h"
#include "src/compiler/js-typed-lowering.h"
#include "src/compiler/jump-threading.h"
#include "src/compiler/load-elimination.h"
#include "src/compiler/machine-operator-reducer.h"
#include "src/compiler/move-optimizer.h"
#include "src/compiler/pipeline-statistics.h"
#include "src/compiler/register-allocator.h"
#include "src/compiler/register-allocator-verifier.h"
#include "src/compiler/schedule.h"
#include "src/compiler/scheduler.h"
#include "src/compiler/select-lowering.h"
#include "src/compiler/simplified-lowering.h"
#include "src/compiler/simplified-operator-reducer.h"
#include "src/compiler/typer.h"
#include "src/compiler/value-numbering-reducer.h"
#include "src/compiler/verifier.h"
#include "src/compiler/zone-pool.h"
#include "src/ostreams.h"
#include "src/utils.h"

namespace v8 {
namespace internal {
namespace compiler {

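// PipelineData holds the state that is threaded through the compilation
// phases. Graph-side objects live in graph_zone_ and backend objects
// (instruction sequence, frame, register allocator) live in
// instruction_zone_, so each group can be released independently via
// DeleteGraphZone() / DeleteInstructionZone() once it is no longer needed.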
class PipelineData {
 public:
  explicit PipelineData(ZonePool* zone_pool, CompilationInfo* info)
      : isolate_(info->zone()->isolate()),
        info_(info),
        outer_zone_(nullptr),
        zone_pool_(zone_pool),
        pipeline_statistics_(nullptr),
        compilation_failed_(false),
        code_(Handle<Code>::null()),
        graph_zone_scope_(zone_pool_),
        graph_zone_(nullptr),
        graph_(nullptr),
        loop_assignment_(nullptr),
        machine_(nullptr),
        common_(nullptr),
        javascript_(nullptr),
        jsgraph_(nullptr),
        typer_(nullptr),
        context_node_(nullptr),
        schedule_(nullptr),
        instruction_zone_scope_(zone_pool_),
        instruction_zone_(nullptr),
        sequence_(nullptr),
        frame_(nullptr),
        register_allocator_(nullptr) {}

  ~PipelineData() {
    DeleteInstructionZone();
    DeleteGraphZone();
  }

  // For main entry point.
  void Initialize(PipelineStatistics* pipeline_statistics) {
    PhaseScope scope(pipeline_statistics, "init pipeline data");
    outer_zone_ = info()->zone();
    pipeline_statistics_ = pipeline_statistics;
    graph_zone_ = graph_zone_scope_.zone();
    graph_ = new (graph_zone()) Graph(graph_zone());
    source_positions_.Reset(new SourcePositionTable(graph()));
    machine_ = new (graph_zone()) MachineOperatorBuilder(
        graph_zone(), kMachPtr,
        InstructionSelector::SupportedMachineOperatorFlags());
    common_ = new (graph_zone()) CommonOperatorBuilder(graph_zone());
    javascript_ = new (graph_zone()) JSOperatorBuilder(graph_zone());
    jsgraph_ =
        new (graph_zone()) JSGraph(graph(), common(), javascript(), machine());
    typer_.Reset(new Typer(graph(), info()->context()));
    instruction_zone_ = instruction_zone_scope_.zone();
  }

  // For machine graph testing entry point.
  void InitializeForTesting(Graph* graph, Schedule* schedule) {
    graph_ = graph;
    source_positions_.Reset(new SourcePositionTable(graph));
    schedule_ = schedule;
    instruction_zone_ = instruction_zone_scope_.zone();
  }

  // For register allocation testing entry point.
  void InitializeForTesting(InstructionSequence* sequence) {
    instruction_zone_ = sequence->zone();
    sequence_ = sequence;
  }

  Isolate* isolate() const { return isolate_; }
  CompilationInfo* info() const { return info_; }
  ZonePool* zone_pool() const { return zone_pool_; }
  PipelineStatistics* pipeline_statistics() { return pipeline_statistics_; }
  bool compilation_failed() const { return compilation_failed_; }
  void set_compilation_failed() { compilation_failed_ = true; }
  Handle<Code> code() { return code_; }
  void set_code(Handle<Code> code) {
    DCHECK(code_.is_null());
    code_ = code;
  }

  // RawMachineAssembler generally produces graphs which cannot be verified.
  bool MayHaveUnverifiableGraph() const { return outer_zone_ == nullptr; }

  Zone* graph_zone() const { return graph_zone_; }
  Graph* graph() const { return graph_; }
  SourcePositionTable* source_positions() const {
    return source_positions_.get();
  }
  MachineOperatorBuilder* machine() const { return machine_; }
  CommonOperatorBuilder* common() const { return common_; }
  JSOperatorBuilder* javascript() const { return javascript_; }
  JSGraph* jsgraph() const { return jsgraph_; }
  Typer* typer() const { return typer_.get(); }

  LoopAssignmentAnalysis* loop_assignment() const { return loop_assignment_; }
  void set_loop_assignment(LoopAssignmentAnalysis* loop_assignment) {
    DCHECK_EQ(nullptr, loop_assignment_);
    loop_assignment_ = loop_assignment;
  }

  Node* context_node() const { return context_node_; }
  void set_context_node(Node* context_node) {
    DCHECK_EQ(nullptr, context_node_);
    context_node_ = context_node;
  }

  Schedule* schedule() const { return schedule_; }
  void set_schedule(Schedule* schedule) {
    DCHECK_EQ(nullptr, schedule_);
    schedule_ = schedule;
  }

  Zone* instruction_zone() const { return instruction_zone_; }
  InstructionSequence* sequence() const { return sequence_; }
  Frame* frame() const { return frame_; }
  RegisterAllocator* register_allocator() const { return register_allocator_; }

  void DeleteGraphZone() {
    // Destroy objects with destructors first.
    source_positions_.Reset(nullptr);
    typer_.Reset(nullptr);
    if (graph_zone_ == nullptr) return;
    // Destroy zone and clear pointers.
    graph_zone_scope_.Destroy();
    graph_zone_ = nullptr;
    graph_ = nullptr;
    loop_assignment_ = nullptr;
    machine_ = nullptr;
    common_ = nullptr;
    javascript_ = nullptr;
    jsgraph_ = nullptr;
    context_node_ = nullptr;
    schedule_ = nullptr;
  }

  void DeleteInstructionZone() {
    if (instruction_zone_ == nullptr) return;
    instruction_zone_scope_.Destroy();
    instruction_zone_ = nullptr;
    sequence_ = nullptr;
    frame_ = nullptr;
    register_allocator_ = nullptr;
  }

  void InitializeInstructionSequence() {
    DCHECK_EQ(nullptr, sequence_);
    InstructionBlocks* instruction_blocks =
        InstructionSequence::InstructionBlocksFor(instruction_zone(),
                                                  schedule());
    sequence_ = new (instruction_zone())
        InstructionSequence(instruction_zone(), instruction_blocks);
  }

  void InitializeRegisterAllocator(Zone* local_zone,
                                   const RegisterConfiguration* config,
                                   const char* debug_name) {
    DCHECK_EQ(nullptr, register_allocator_);
    DCHECK_EQ(nullptr, frame_);
    frame_ = new (instruction_zone()) Frame();
    register_allocator_ = new (instruction_zone())
        RegisterAllocator(config, local_zone, frame(), sequence(), debug_name);
  }

 private:
  Isolate* isolate_;
  CompilationInfo* info_;
  Zone* outer_zone_;
  ZonePool* const zone_pool_;
  PipelineStatistics* pipeline_statistics_;
  bool compilation_failed_;
  Handle<Code> code_;

  // All objects in the following group of fields are allocated in graph_zone_.
  // They are all set to NULL when the graph_zone_ is destroyed.
  ZonePool::Scope graph_zone_scope_;
  Zone* graph_zone_;
  Graph* graph_;
  // TODO(dcarney): make this into a ZoneObject.
  SmartPointer<SourcePositionTable> source_positions_;
  LoopAssignmentAnalysis* loop_assignment_;
  MachineOperatorBuilder* machine_;
  CommonOperatorBuilder* common_;
  JSOperatorBuilder* javascript_;
  JSGraph* jsgraph_;
  // TODO(dcarney): make this into a ZoneObject.
  SmartPointer<Typer> typer_;
  Node* context_node_;
  Schedule* schedule_;

  // All objects in the following group of fields are allocated in
  // instruction_zone_. They are all set to NULL when the instruction_zone_ is
  // destroyed.
  ZonePool::Scope instruction_zone_scope_;
  Zone* instruction_zone_;
  InstructionSequence* sequence_;
  Frame* frame_;
  RegisterAllocator* register_allocator_;

  DISALLOW_COPY_AND_ASSIGN(PipelineData);
};


static inline bool VerifyGraphs() {
#ifdef DEBUG
  return true;
#else
  return FLAG_turbo_verify;
#endif
}


struct TurboCfgFile : public std::ofstream {
  explicit TurboCfgFile(Isolate* isolate)
      : std::ofstream(isolate->GetTurboCfgFileName().c_str(),
                      std::ios_base::app) {}
};


static void TraceSchedule(Schedule* schedule) {
  if (!FLAG_trace_turbo_graph && !FLAG_trace_turbo_scheduler) return;
  OFStream os(stdout);
  os << "-- Schedule --------------------------------------\n" << *schedule;
}


static SmartArrayPointer<char> GetDebugName(CompilationInfo* info) {
  SmartArrayPointer<char> name;
  if (info->IsStub()) {
    if (info->code_stub() != NULL) {
      CodeStub::Major major_key = info->code_stub()->MajorKey();
      const char* major_name = CodeStub::MajorName(major_key, false);
      // +1 so the copied stub name keeps its NUL terminator.
      size_t len = strlen(major_name) + 1;
      name.Reset(new char[len]);
      memcpy(name.get(), major_name, len);
    }
  } else {
    AllowHandleDereference allow_deref;
    name = info->function()->debug_name()->ToCString();
  }
  return name;
}


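// AstGraphBuilder subclass that wraps every AST visit in a
// SourcePositionTable::Scope, so each node created during graph building is
// tagged with the position of the AST node it came from.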
class AstGraphBuilderWithPositions : public AstGraphBuilder {
 public:
  AstGraphBuilderWithPositions(Zone* local_zone, CompilationInfo* info,
                               JSGraph* jsgraph,
                               LoopAssignmentAnalysis* loop_assignment,
                               SourcePositionTable* source_positions)
      : AstGraphBuilder(local_zone, info, jsgraph, loop_assignment),
        source_positions_(source_positions) {}

  bool CreateGraph() {
    SourcePositionTable::Scope pos(source_positions_,
                                   SourcePosition::Unknown());
    return AstGraphBuilder::CreateGraph();
  }

#define DEF_VISIT(type)                                               \
  void Visit##type(type* node) OVERRIDE {                             \
    SourcePositionTable::Scope pos(source_positions_,                 \
                                   SourcePosition(node->position())); \
    AstGraphBuilder::Visit##type(node);                               \
  }
  AST_NODE_LIST(DEF_VISIT)
#undef DEF_VISIT

  Node* GetFunctionContext() { return AstGraphBuilder::GetFunctionContext(); }

 private:
  SourcePositionTable* source_positions_;
};


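// Scope object set up around every phase: it opens a PhaseScope for
// --turbo-stats bookkeeping (skipped for unnamed phases) and a temporary
// zone that lives only for the duration of the phase.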
class PipelineRunScope {
 public:
  PipelineRunScope(PipelineData* data, const char* phase_name)
      : phase_scope_(
            phase_name == nullptr ? nullptr : data->pipeline_statistics(),
            phase_name),
        zone_scope_(data->zone_pool()) {}

  Zone* zone() { return zone_scope_.zone(); }

 private:
  PhaseScope phase_scope_;
  ZonePool::Scope zone_scope_;
};


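// Helpers that run a single phase struct: they set up a PipelineRunScope
// (statistics plus a fresh temporary zone) and forward the shared
// PipelineData, the temporary zone, and any extra argument to Phase::Run().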
template <typename Phase>
void Pipeline::Run() {
  PipelineRunScope scope(this->data_, Phase::phase_name());
  Phase phase;
  phase.Run(this->data_, scope.zone());
}


template <typename Phase, typename Arg0>
void Pipeline::Run(Arg0 arg_0) {
  PipelineRunScope scope(this->data_, Phase::phase_name());
  Phase phase;
  phase.Run(this->data_, scope.zone(), arg_0);
}


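// The phases below are stateless structs: phase_name() labels the phase for
// tracing and --turbo-stats, and Run() does the work using the shared
// PipelineData plus a per-phase temporary zone.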
struct LoopAssignmentAnalysisPhase {
  static const char* phase_name() { return "loop assignment analysis"; }

  void Run(PipelineData* data, Zone* temp_zone) {
    AstLoopAssignmentAnalyzer analyzer(data->graph_zone(), data->info());
    LoopAssignmentAnalysis* loop_assignment = analyzer.Analyze();
    data->set_loop_assignment(loop_assignment);
  }
};


struct GraphBuilderPhase {
  static const char* phase_name() { return "graph builder"; }

  void Run(PipelineData* data, Zone* temp_zone) {
    AstGraphBuilderWithPositions graph_builder(
        temp_zone, data->info(), data->jsgraph(), data->loop_assignment(),
        data->source_positions());
    if (graph_builder.CreateGraph()) {
      data->set_context_node(graph_builder.GetFunctionContext());
    } else {
      data->set_compilation_failed();
    }
  }
};


struct ContextSpecializerPhase {
  static const char* phase_name() { return "context specializing"; }

  void Run(PipelineData* data, Zone* temp_zone) {
    SourcePositionTable::Scope pos(data->source_positions(),
                                   SourcePosition::Unknown());
    JSContextSpecializer spec(data->info(), data->jsgraph(),
                              data->context_node());
    GraphReducer graph_reducer(data->graph(), temp_zone);
    graph_reducer.AddReducer(&spec);
    graph_reducer.ReduceGraph();
  }
};


struct InliningPhase {
  static const char* phase_name() { return "inlining"; }

  void Run(PipelineData* data, Zone* temp_zone) {
    SourcePositionTable::Scope pos(data->source_positions(),
                                   SourcePosition::Unknown());
    JSInliner inliner(temp_zone, data->info(), data->jsgraph());
    inliner.Inline();
  }
};


struct TyperPhase {
  static const char* phase_name() { return "typer"; }

  void Run(PipelineData* data, Zone* temp_zone) { data->typer()->Run(); }
};


struct TypedLoweringPhase {
  static const char* phase_name() { return "typed lowering"; }

  void Run(PipelineData* data, Zone* temp_zone) {
    SourcePositionTable::Scope pos(data->source_positions(),
                                   SourcePosition::Unknown());
    ValueNumberingReducer vn_reducer(temp_zone);
    LoadElimination load_elimination;
    JSBuiltinReducer builtin_reducer(data->jsgraph());
    JSTypedLowering typed_lowering(data->jsgraph(), temp_zone);
    SimplifiedOperatorReducer simple_reducer(data->jsgraph());
    CommonOperatorReducer common_reducer;
    GraphReducer graph_reducer(data->graph(), temp_zone);
    graph_reducer.AddReducer(&vn_reducer);
    graph_reducer.AddReducer(&builtin_reducer);
    graph_reducer.AddReducer(&typed_lowering);
    graph_reducer.AddReducer(&load_elimination);
    graph_reducer.AddReducer(&simple_reducer);
    graph_reducer.AddReducer(&common_reducer);
    graph_reducer.ReduceGraph();
  }
};


struct SimplifiedLoweringPhase {
  static const char* phase_name() { return "simplified lowering"; }

  void Run(PipelineData* data, Zone* temp_zone) {
    SourcePositionTable::Scope pos(data->source_positions(),
                                   SourcePosition::Unknown());
    SimplifiedLowering lowering(data->jsgraph(), temp_zone);
    lowering.LowerAllNodes();
    ValueNumberingReducer vn_reducer(temp_zone);
    SimplifiedOperatorReducer simple_reducer(data->jsgraph());
    MachineOperatorReducer machine_reducer(data->jsgraph());
    CommonOperatorReducer common_reducer;
    GraphReducer graph_reducer(data->graph(), temp_zone);
    graph_reducer.AddReducer(&vn_reducer);
    graph_reducer.AddReducer(&simple_reducer);
    graph_reducer.AddReducer(&machine_reducer);
    graph_reducer.AddReducer(&common_reducer);
    graph_reducer.ReduceGraph();
  }
};


struct ChangeLoweringPhase {
  static const char* phase_name() { return "change lowering"; }

  void Run(PipelineData* data, Zone* temp_zone) {
    SourcePositionTable::Scope pos(data->source_positions(),
                                   SourcePosition::Unknown());
    Linkage linkage(data->graph_zone(), data->info());
    ValueNumberingReducer vn_reducer(temp_zone);
    SimplifiedOperatorReducer simple_reducer(data->jsgraph());
    ChangeLowering lowering(data->jsgraph(), &linkage);
    MachineOperatorReducer machine_reducer(data->jsgraph());
    CommonOperatorReducer common_reducer;
    GraphReducer graph_reducer(data->graph(), temp_zone);
    graph_reducer.AddReducer(&vn_reducer);
    graph_reducer.AddReducer(&simple_reducer);
    graph_reducer.AddReducer(&lowering);
    graph_reducer.AddReducer(&machine_reducer);
    graph_reducer.AddReducer(&common_reducer);
    graph_reducer.ReduceGraph();
  }
};


struct ControlReductionPhase {
  void Run(PipelineData* data, Zone* temp_zone) {
    SourcePositionTable::Scope pos(data->source_positions(),
                                   SourcePosition::Unknown());
    ControlReducer::ReduceGraph(temp_zone, data->jsgraph(), data->common());
  }
};


struct EarlyControlReductionPhase : ControlReductionPhase {
  static const char* phase_name() { return "early control reduction"; }
};


struct LateControlReductionPhase : ControlReductionPhase {
  static const char* phase_name() { return "late control reduction"; }
};


struct GenericLoweringPhase {
  static const char* phase_name() { return "generic lowering"; }

  void Run(PipelineData* data, Zone* temp_zone) {
    SourcePositionTable::Scope pos(data->source_positions(),
                                   SourcePosition::Unknown());
    JSGenericLowering generic(data->info(), data->jsgraph());
    SelectLowering select(data->jsgraph()->graph(), data->jsgraph()->common());
    GraphReducer graph_reducer(data->graph(), temp_zone);
    graph_reducer.AddReducer(&generic);
    graph_reducer.AddReducer(&select);
    graph_reducer.ReduceGraph();
  }
};


struct ComputeSchedulePhase {
  static const char* phase_name() { return "scheduling"; }

  void Run(PipelineData* data, Zone* temp_zone) {
    Schedule* schedule = Scheduler::ComputeSchedule(temp_zone, data->graph());
    TraceSchedule(schedule);
    if (VerifyGraphs()) ScheduleVerifier::Run(schedule);
    data->set_schedule(schedule);
  }
};


struct InstructionSelectionPhase {
  static const char* phase_name() { return "select instructions"; }

  void Run(PipelineData* data, Zone* temp_zone, Linkage* linkage) {
    InstructionSelector selector(temp_zone, data->graph(), linkage,
                                 data->sequence(), data->schedule(),
                                 data->source_positions());
    selector.SelectInstructions();
  }
};


struct MeetRegisterConstraintsPhase {
  static const char* phase_name() { return "meet register constraints"; }

  void Run(PipelineData* data, Zone* temp_zone) {
    data->register_allocator()->MeetRegisterConstraints();
  }
};


struct ResolvePhisPhase {
  static const char* phase_name() { return "resolve phis"; }

  void Run(PipelineData* data, Zone* temp_zone) {
    data->register_allocator()->ResolvePhis();
  }
};


struct BuildLiveRangesPhase {
  static const char* phase_name() { return "build live ranges"; }

  void Run(PipelineData* data, Zone* temp_zone) {
    data->register_allocator()->BuildLiveRanges();
  }
};


struct AllocateGeneralRegistersPhase {
  static const char* phase_name() { return "allocate general registers"; }

  void Run(PipelineData* data, Zone* temp_zone) {
    data->register_allocator()->AllocateGeneralRegisters();
  }
};


struct AllocateDoubleRegistersPhase {
  static const char* phase_name() { return "allocate double registers"; }

  void Run(PipelineData* data, Zone* temp_zone) {
    data->register_allocator()->AllocateDoubleRegisters();
  }
};


struct ReuseSpillSlotsPhase {
  static const char* phase_name() { return "reuse spill slots"; }

  void Run(PipelineData* data, Zone* temp_zone) {
    data->register_allocator()->ReuseSpillSlots();
  }
};


struct CommitAssignmentPhase {
  static const char* phase_name() { return "commit assignment"; }

  void Run(PipelineData* data, Zone* temp_zone) {
    data->register_allocator()->CommitAssignment();
  }
};


struct PopulatePointerMapsPhase {
  static const char* phase_name() { return "populate pointer maps"; }

  void Run(PipelineData* data, Zone* temp_zone) {
    data->register_allocator()->PopulatePointerMaps();
  }
};


struct ConnectRangesPhase {
  static const char* phase_name() { return "connect ranges"; }

  void Run(PipelineData* data, Zone* temp_zone) {
    data->register_allocator()->ConnectRanges();
  }
};


struct ResolveControlFlowPhase {
  static const char* phase_name() { return "resolve control flow"; }

  void Run(PipelineData* data, Zone* temp_zone) {
    data->register_allocator()->ResolveControlFlow();
  }
};


struct OptimizeMovesPhase {
  static const char* phase_name() { return "optimize moves"; }

  void Run(PipelineData* data, Zone* temp_zone) {
    MoveOptimizer move_optimizer(temp_zone, data->sequence());
    move_optimizer.Run();
  }
};


struct JumpThreadingPhase {
  static const char* phase_name() { return "jump threading"; }

  void Run(PipelineData* data, Zone* temp_zone) {
    ZoneVector<BasicBlock::RpoNumber> result(temp_zone);
    if (JumpThreading::ComputeForwarding(temp_zone, result, data->sequence())) {
      JumpThreading::ApplyForwarding(result, data->sequence());
    }
  }
};


struct GenerateCodePhase {
  static const char* phase_name() { return "generate code"; }

  void Run(PipelineData* data, Zone* temp_zone, Linkage* linkage) {
    CodeGenerator generator(data->frame(), linkage, data->sequence(),
                            data->info());
    data->set_code(generator.GenerateCode());
  }
};


struct PrintGraphPhase {
  static const char* phase_name() { return nullptr; }

  void Run(PipelineData* data, Zone* temp_zone, const char* phase) {
    CompilationInfo* info = data->info();
    Graph* graph = data->graph();
    char buffer[256];
    Vector<char> filename(buffer, sizeof(buffer));
    SmartArrayPointer<char> functionname;
    if (!info->shared_info().is_null()) {
      functionname = info->shared_info()->DebugName()->ToCString();
      if (strlen(functionname.get()) > 0) {
        SNPrintF(filename, "turbo-%s-%s", functionname.get(), phase);
      } else {
        SNPrintF(filename, "turbo-%p-%s", static_cast<void*>(info), phase);
      }
    } else {
      SNPrintF(filename, "turbo-none-%s", phase);
    }
    std::replace(filename.start(), filename.start() + filename.length(), ' ',
                 '_');

    {  // Print dot.
      char dot_buffer[256];
      Vector<char> dot_filename(dot_buffer, sizeof(dot_buffer));
      SNPrintF(dot_filename, "%s.dot", filename.start());
      FILE* dot_file = base::OS::FOpen(dot_filename.start(), "w+");
      if (dot_file == nullptr) return;
      OFStream dot_of(dot_file);
      dot_of << AsDOT(*graph);
      fclose(dot_file);
    }

    {  // Print JSON.
      char json_buffer[256];
      Vector<char> json_filename(json_buffer, sizeof(json_buffer));
      SNPrintF(json_filename, "%s.json", filename.start());
      FILE* json_file = base::OS::FOpen(json_filename.start(), "w+");
      if (json_file == nullptr) return;
      OFStream json_of(json_file);
      json_of << AsJSON(*graph);
      fclose(json_file);
    }

    OFStream os(stdout);
    if (FLAG_trace_turbo_graph) {  // Simple textual RPO.
      os << "-- Graph after " << phase << " -- " << std::endl;
      os << AsRPO(*graph);
    }

    os << "-- " << phase << " graph printed to file " << filename.start()
       << std::endl;
  }
};


struct VerifyGraphPhase {
  static const char* phase_name() { return nullptr; }

  void Run(PipelineData* data, Zone* temp_zone, const bool untyped) {
    Verifier::Run(data->graph(), FLAG_turbo_types && !untyped
                                     ? Verifier::TYPED
                                     : Verifier::UNTYPED);
  }
};


void Pipeline::BeginPhaseKind(const char* phase_kind_name) {
  if (data_->pipeline_statistics() != NULL) {
    data_->pipeline_statistics()->BeginPhaseKind(phase_kind_name);
  }
}


void Pipeline::RunPrintAndVerify(const char* phase, bool untyped) {
  if (FLAG_trace_turbo) {
    Run<PrintGraphPhase>(phase);
  }
  if (VerifyGraphs()) {
    Run<VerifyGraphPhase>(untyped);
  }
}


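// Main TurboFan entry point: builds the graph from the AST, runs the
// typed/simplified/change/generic lowering phases, computes a schedule, and
// then hands off to GenerateCode(Linkage*) for instruction selection,
// register allocation, and code generation.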
Handle<Code> Pipeline::GenerateCode() {
  // This list must be kept in sync with DONT_TURBOFAN_NODE in ast.cc.
  if (info()->function()->dont_optimize_reason() == kTryCatchStatement ||
      info()->function()->dont_optimize_reason() == kTryFinallyStatement ||
      // TODO(turbofan): Make ES6 for-of work and remove this bailout.
      info()->function()->dont_optimize_reason() == kForOfStatement ||
      // TODO(turbofan): Make super work and remove this bailout.
      info()->function()->dont_optimize_reason() == kSuperReference ||
      // TODO(turbofan): Make class literals work and remove this bailout.
      info()->function()->dont_optimize_reason() == kClassLiteral ||
      // TODO(turbofan): Make OSR work and remove this bailout.
      info()->is_osr()) {
    return Handle<Code>::null();
  }

  ZonePool zone_pool(isolate());
  SmartPointer<PipelineStatistics> pipeline_statistics;

  if (FLAG_turbo_stats) {
    pipeline_statistics.Reset(new PipelineStatistics(info(), &zone_pool));
    pipeline_statistics->BeginPhaseKind("initializing");
  }

  PipelineData data(&zone_pool, info());
  this->data_ = &data;
  data.Initialize(pipeline_statistics.get());

  BeginPhaseKind("graph creation");

  if (FLAG_trace_turbo) {
    OFStream os(stdout);
    os << "---------------------------------------------------\n"
       << "Begin compiling method " << GetDebugName(info()).get()
       << " using Turbofan" << std::endl;
    TurboCfgFile tcf(isolate());
    tcf << AsC1VCompilation(info());
  }

  data.source_positions()->AddDecorator();

  if (FLAG_loop_assignment_analysis) {
    Run<LoopAssignmentAnalysisPhase>();
  }

  Run<GraphBuilderPhase>();
  if (data.compilation_failed()) return Handle<Code>::null();
  RunPrintAndVerify("Initial untyped", true);

  Run<EarlyControlReductionPhase>();
  RunPrintAndVerify("Early Control reduced", true);

  if (info()->is_context_specializing()) {
    // Specialize the code to the context as aggressively as possible.
    Run<ContextSpecializerPhase>();
    RunPrintAndVerify("Context specialized", true);
  }

  if (info()->is_inlining_enabled()) {
    Run<InliningPhase>();
    RunPrintAndVerify("Inlined", true);
  }

  if (FLAG_print_turbo_replay) {
    // Print a replay of the initial graph.
    GraphReplayPrinter::PrintReplay(data.graph());
  }

  // Bail out here in case the target architecture is not supported.
  if (!SupportedTarget()) return Handle<Code>::null();

  if (info()->is_typing_enabled()) {
    // Type the graph.
    Run<TyperPhase>();
    RunPrintAndVerify("Typed");
  }

  BeginPhaseKind("lowering");

  if (info()->is_typing_enabled()) {
    // Lower JSOperators where we can determine types.
    Run<TypedLoweringPhase>();
    RunPrintAndVerify("Lowered typed");

    // Lower simplified operators and insert changes.
    Run<SimplifiedLoweringPhase>();
    RunPrintAndVerify("Lowered simplified");

    // Lower changes that have been inserted before.
    Run<ChangeLoweringPhase>();
    // TODO(jarin, rossberg): Remove UNTYPED once machine typing works.
    RunPrintAndVerify("Lowered changes", true);

    Run<LateControlReductionPhase>();
    RunPrintAndVerify("Late Control reduced");
  }

  // Lower any remaining generic JSOperators.
  Run<GenericLoweringPhase>();
  // TODO(jarin, rossberg): Remove UNTYPED once machine typing works.
  RunPrintAndVerify("Lowered generic", true);

  BeginPhaseKind("block building");

  data.source_positions()->RemoveDecorator();

  // Compute a schedule.
  Run<ComputeSchedulePhase>();

  {
    // Generate optimized code.
    Linkage linkage(data.instruction_zone(), info());
    GenerateCode(&linkage);
  }
  Handle<Code> code = data.code();
  info()->SetCode(code);

  // Print optimized code.
  v8::internal::CodeGenerator::PrintCode(code, info());

  if (FLAG_trace_turbo) {
    OFStream os(stdout);
    os << "---------------------------------------------------\n"
       << "Finished compiling method " << GetDebugName(info()).get()
       << " using Turbofan" << std::endl;
  }

  return code;
}


Handle<Code> Pipeline::GenerateCodeForTesting(CompilationInfo* info,
                                              Graph* graph,
                                              Schedule* schedule) {
  CallDescriptor* call_descriptor =
      Linkage::ComputeIncoming(info->zone(), info);
  return GenerateCodeForTesting(info, call_descriptor, graph, schedule);
}


Handle<Code> Pipeline::GenerateCodeForTesting(CallDescriptor* call_descriptor,
                                              Graph* graph,
                                              Schedule* schedule) {
  CompilationInfo info(graph->zone()->isolate(), graph->zone());
  return GenerateCodeForTesting(&info, call_descriptor, graph, schedule);
}


Handle<Code> Pipeline::GenerateCodeForTesting(CompilationInfo* info,
                                              CallDescriptor* call_descriptor,
                                              Graph* graph,
                                              Schedule* schedule) {
  CHECK(SupportedBackend());
  ZonePool zone_pool(info->isolate());
  Pipeline pipeline(info);
  PipelineData data(&zone_pool, info);
  pipeline.data_ = &data;
  data.InitializeForTesting(graph, schedule);
  if (schedule == NULL) {
    // TODO(rossberg): Should this really be untyped?
    pipeline.RunPrintAndVerify("Machine", true);
    pipeline.Run<ComputeSchedulePhase>();
  } else {
    TraceSchedule(schedule);
  }

  Linkage linkage(info->zone(), call_descriptor);
  pipeline.GenerateCode(&linkage);
  Handle<Code> code = data.code();

#if ENABLE_DISASSEMBLER
  if (!code.is_null() && FLAG_print_opt_code) {
    CodeTracer::Scope tracing_scope(info->isolate()->GetCodeTracer());
    OFStream os(tracing_scope.file());
    code->Disassemble("test code", os);
  }
#endif
  return code;
}


bool Pipeline::AllocateRegistersForTesting(const RegisterConfiguration* config,
                                           InstructionSequence* sequence,
                                           bool run_verifier) {
  CompilationInfo info(sequence->zone()->isolate(), sequence->zone());
  ZonePool zone_pool(sequence->zone()->isolate());
  PipelineData data(&zone_pool, &info);
  data.InitializeForTesting(sequence);
  Pipeline pipeline(&info);
  pipeline.data_ = &data;
  pipeline.AllocateRegisters(config, run_verifier);
  return !data.compilation_failed();
}


void Pipeline::GenerateCode(Linkage* linkage) {
  PipelineData* data = this->data_;

  DCHECK_NOT_NULL(linkage);
  DCHECK_NOT_NULL(data->graph());
  DCHECK_NOT_NULL(data->schedule());
  CHECK(SupportedBackend());

  BasicBlockProfiler::Data* profiler_data = NULL;
  if (FLAG_turbo_profiling) {
    profiler_data = BasicBlockInstrumentor::Instrument(info(), data->graph(),
                                                       data->schedule());
  }

  data->InitializeInstructionSequence();

  // Select and schedule instructions covering the scheduled graph.
  Run<InstructionSelectionPhase>(linkage);

  if (FLAG_trace_turbo && !data->MayHaveUnverifiableGraph()) {
    TurboCfgFile tcf(isolate());
    tcf << AsC1V("CodeGen", data->schedule(), data->source_positions(),
                 data->sequence());
  }

  data->DeleteGraphZone();

  BeginPhaseKind("register allocation");

  bool run_verifier = false;
#ifdef DEBUG
  run_verifier = true;
#endif
  // Allocate registers.
  AllocateRegisters(RegisterConfiguration::ArchDefault(), run_verifier);
  if (data->compilation_failed()) {
    info()->AbortOptimization(kNotEnoughVirtualRegistersRegalloc);
    return;
  }

  BeginPhaseKind("code generation");

  // Optimize jumps.
  if (FLAG_turbo_jt) {
    Run<JumpThreadingPhase>();
  }

  // Generate final machine code.
  Run<GenerateCodePhase>(linkage);

  if (profiler_data != NULL) {
#if ENABLE_DISASSEMBLER
    std::ostringstream os;
    data->code()->Disassemble(NULL, os);
    profiler_data->SetCode(&os);
#endif
  }
}


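// Runs the register allocation sub-phases in order. Sets the
// compilation_failed flag (instead of crashing) when the virtual register
// count exceeds the allocator's limit or an allocation step reports failure.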
void Pipeline::AllocateRegisters(const RegisterConfiguration* config,
                                 bool run_verifier) {
  PipelineData* data = this->data_;

  int node_count = data->sequence()->VirtualRegisterCount();
  if (node_count > UnallocatedOperand::kMaxVirtualRegisters) {
    data->set_compilation_failed();
    return;
  }

  // Don't track usage for this zone in compiler stats.
  SmartPointer<Zone> verifier_zone;
  RegisterAllocatorVerifier* verifier = nullptr;
  if (run_verifier) {
    verifier_zone.Reset(new Zone(info()->isolate()));
    verifier = new (verifier_zone.get()) RegisterAllocatorVerifier(
        verifier_zone.get(), config, data->sequence());
  }

  SmartArrayPointer<char> debug_name;
#ifdef DEBUG
  debug_name = GetDebugName(data->info());
#endif

  ZonePool::Scope zone_scope(data->zone_pool());
  data->InitializeRegisterAllocator(zone_scope.zone(), config,
                                    debug_name.get());

  Run<MeetRegisterConstraintsPhase>();
  Run<ResolvePhisPhase>();
  Run<BuildLiveRangesPhase>();
  if (FLAG_trace_turbo_graph) {
    OFStream os(stdout);
    PrintableInstructionSequence printable = {config, data->sequence()};
    os << "----- Instruction sequence before register allocation -----\n"
       << printable;
  }
  if (verifier != nullptr) {
    CHECK(!data->register_allocator()->ExistsUseWithoutDefinition());
  }
  Run<AllocateGeneralRegistersPhase>();
  if (!data->register_allocator()->AllocationOk()) {
    data->set_compilation_failed();
    return;
  }
  Run<AllocateDoubleRegistersPhase>();
  if (!data->register_allocator()->AllocationOk()) {
    data->set_compilation_failed();
    return;
  }
  if (FLAG_turbo_reuse_spill_slots) {
    Run<ReuseSpillSlotsPhase>();
  }
  Run<CommitAssignmentPhase>();
  Run<PopulatePointerMapsPhase>();
  Run<ConnectRangesPhase>();
  Run<ResolveControlFlowPhase>();
  if (FLAG_turbo_move_optimization) {
    Run<OptimizeMovesPhase>();
  }

  if (FLAG_trace_turbo_graph) {
    OFStream os(stdout);
    PrintableInstructionSequence printable = {config, data->sequence()};
    os << "----- Instruction sequence after register allocation -----\n"
       << printable;
  }

  if (verifier != nullptr) {
    verifier->VerifyAssignment();
    verifier->VerifyGapMoves();
  }

  if (FLAG_trace_turbo && !data->MayHaveUnverifiableGraph()) {
    TurboCfgFile tcf(data->isolate());
    tcf << AsC1VAllocator("CodeGen", data->register_allocator());
  }
}


void Pipeline::SetUp() {
  InstructionOperand::SetUpCaches();
}


void Pipeline::TearDown() {
  InstructionOperand::TearDownCaches();
}

}  // namespace compiler
}  // namespace internal
}  // namespace v8