/*
 * Copyright (C) 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
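
// Entry points of the optimizing compiler backend. TryCompile() builds an HGraph
// from the method's dex code, optionally runs the SSA-based passes and register
// allocation, and emits code through a CodeGenerator; Compile() falls back to the
// delegate (Quick) compiler whenever TryCompile() returns null.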

#include "optimizing_compiler.h"

#include <fstream>
#include <stdint.h>

#include "builder.h"
#include "code_generator.h"
#include "compiler.h"
#include "driver/compiler_driver.h"
#include "driver/dex_compilation_unit.h"
#include "graph_visualizer.h"
#include "nodes.h"
#include "register_allocator.h"
#include "ssa_phi_elimination.h"
#include "ssa_liveness_analysis.h"
#include "utils/arena_allocator.h"

namespace art {

/**
 * Used by the code generator to allocate the generated code in a vector.
 */
class CodeVectorAllocator FINAL : public CodeAllocator {
 public:
  CodeVectorAllocator() : size_(0) {}

  virtual uint8_t* Allocate(size_t size) {
    size_ = size;
    memory_.resize(size);
    return &memory_[0];
  }

  size_t GetSize() const { return size_; }
  const std::vector<uint8_t>& GetMemory() const { return memory_; }

 private:
  std::vector<uint8_t> memory_;
  size_t size_;

  DISALLOW_COPY_AND_ASSIGN(CodeVectorAllocator);
};

/**
 * If set to true, generates a file suitable for the c1visualizer tool and IRHydra.
 */
static bool kIsVisualizerEnabled = false;

/**
 * Filter to apply to the visualizer. Methods whose name contains that filter will
 * be included in the output file.
 */
static const char* kStringFilter = "";
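// For example, setting kStringFilter to "Fibonacci" (a hypothetical value) would
// limit the dump to methods whose name contains "Fibonacci"; the empty default is
// meant to place no name-based restriction on the output.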

class OptimizingCompiler FINAL : public Compiler {
 public:
  explicit OptimizingCompiler(CompilerDriver* driver);
  ~OptimizingCompiler();

  bool CanCompileMethod(uint32_t method_idx, const DexFile& dex_file, CompilationUnit* cu) const
      OVERRIDE;

  CompiledMethod* Compile(const DexFile::CodeItem* code_item,
                          uint32_t access_flags,
                          InvokeType invoke_type,
                          uint16_t class_def_idx,
                          uint32_t method_idx,
                          jobject class_loader,
                          const DexFile& dex_file) const OVERRIDE;

  CompiledMethod* TryCompile(const DexFile::CodeItem* code_item,
                             uint32_t access_flags,
                             InvokeType invoke_type,
                             uint16_t class_def_idx,
                             uint32_t method_idx,
                             jobject class_loader,
                             const DexFile& dex_file) const;

  // The following methods delegate to the fallback compiler.
  CompiledMethod* JniCompile(uint32_t access_flags,
                             uint32_t method_idx,
                             const DexFile& dex_file) const OVERRIDE;

  uintptr_t GetEntryPointOf(mirror::ArtMethod* method) const OVERRIDE
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  bool WriteElf(art::File* file,
                OatWriter* oat_writer,
                const std::vector<const art::DexFile*>& dex_files,
                const std::string& android_root,
                bool is_host) const OVERRIDE SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  Backend* GetCodeGenerator(CompilationUnit* cu, void* compilation_unit) const OVERRIDE;

  void InitCompilationUnit(CompilationUnit& cu) const OVERRIDE;

  void Init() const OVERRIDE;

  void UnInit() const OVERRIDE;

 private:
  // Whether we should run any optimization or register allocation. If false, we
  // just run code generation after the graph has been built.
  const bool run_optimizations_;
  mutable AtomicInteger total_compiled_methods_;
  mutable AtomicInteger unoptimized_compiled_methods_;
  mutable AtomicInteger optimized_compiled_methods_;

  std::unique_ptr<std::ostream> visualizer_output_;

  // Delegate to another compiler in case the optimizing compiler cannot compile a method.
  // Currently the fallback is the quick compiler.
  std::unique_ptr<Compiler> delegate_;

  DISALLOW_COPY_AND_ASSIGN(OptimizingCompiler);
};
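
// Note on the structure above: Compile() first calls TryCompile(); if that returns
// nullptr (unsupported architecture, graph construction failure, or a missing code
// generator), the request is forwarded to delegate_, the Quick compiler.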

static const int kMaximumCompilationTimeBeforeWarning = 100; /* ms */

OptimizingCompiler::OptimizingCompiler(CompilerDriver* driver)
    : Compiler(driver, kMaximumCompilationTimeBeforeWarning),
      run_optimizations_(
          driver->GetCompilerOptions().GetCompilerFilter() != CompilerOptions::kTime),
      total_compiled_methods_(0),
      unoptimized_compiled_methods_(0),
      optimized_compiled_methods_(0),
      delegate_(Create(driver, Compiler::Kind::kQuick)) {
  if (kIsVisualizerEnabled) {
    visualizer_output_.reset(new std::ofstream("art.cfg"));
  }
}

void OptimizingCompiler::Init() const {
  delegate_->Init();
}

void OptimizingCompiler::UnInit() const {
  delegate_->UnInit();
}

OptimizingCompiler::~OptimizingCompiler() {
  if (total_compiled_methods_ == 0) {
    // Guard against dividing by zero below when no method was compiled.
    LOG(INFO) << "Did not compile any method.";
    return;
  }
  size_t unoptimized_percent = (unoptimized_compiled_methods_ * 100 / total_compiled_methods_);
  size_t optimized_percent = (optimized_compiled_methods_ * 100 / total_compiled_methods_);
  LOG(INFO) << "Compiled " << total_compiled_methods_ << " methods: "
            << unoptimized_percent << "% (" << unoptimized_compiled_methods_ << ") unoptimized, "
            << optimized_percent << "% (" << optimized_compiled_methods_ << ") optimized.";
}

bool OptimizingCompiler::CanCompileMethod(uint32_t method_idx, const DexFile& dex_file,
                                          CompilationUnit* cu) const {
  return delegate_->CanCompileMethod(method_idx, dex_file, cu);
}

CompiledMethod* OptimizingCompiler::JniCompile(uint32_t access_flags,
                                               uint32_t method_idx,
                                               const DexFile& dex_file) const {
  return delegate_->JniCompile(access_flags, method_idx, dex_file);
}

uintptr_t OptimizingCompiler::GetEntryPointOf(mirror::ArtMethod* method) const {
  return delegate_->GetEntryPointOf(method);
}

bool OptimizingCompiler::WriteElf(art::File* file, OatWriter* oat_writer,
                                  const std::vector<const art::DexFile*>& dex_files,
                                  const std::string& android_root, bool is_host) const {
  return delegate_->WriteElf(file, oat_writer, dex_files, android_root, is_host);
}

Backend* OptimizingCompiler::GetCodeGenerator(CompilationUnit* cu, void* compilation_unit) const {
  return delegate_->GetCodeGenerator(cu, compilation_unit);
}

void OptimizingCompiler::InitCompilationUnit(CompilationUnit& cu) const {
  delegate_->InitCompilationUnit(cu);
}

CompiledMethod* OptimizingCompiler::TryCompile(const DexFile::CodeItem* code_item,
                                               uint32_t access_flags,
                                               InvokeType invoke_type,
                                               uint16_t class_def_idx,
                                               uint32_t method_idx,
                                               jobject class_loader,
                                               const DexFile& dex_file) const {
  total_compiled_methods_++;
  InstructionSet instruction_set = GetCompilerDriver()->GetInstructionSet();
  // Always use the thumb2 assembler: some runtime functionality (like implicit stack
  // overflow checks) assumes thumb2.
  if (instruction_set == kArm) {
    instruction_set = kThumb2;
  }

  // Do not attempt to compile on architectures we do not support.
  if (instruction_set != kX86 && instruction_set != kX86_64 && instruction_set != kThumb2) {
    return nullptr;
  }

  DexCompilationUnit dex_compilation_unit(
      nullptr, class_loader, art::Runtime::Current()->GetClassLinker(), dex_file, code_item,
      class_def_idx, method_idx, access_flags,
      GetCompilerDriver()->GetVerifiedMethod(&dex_file, method_idx));

  // For testing purposes, we put a special marker on method names that should be compiled
  // with this compiler. This makes sure we're not regressing.
  bool shouldCompile = dex_compilation_unit.GetSymbol().find("00024opt_00024") != std::string::npos;
  bool shouldOptimize =
      dex_compilation_unit.GetSymbol().find("00024reg_00024") != std::string::npos;
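  // Note: "_00024" is the JNI-mangled form of '$' (U+0024), so these markers match
  // test methods whose source names contain "$opt$" or "$reg$" respectively.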

  ArenaPool pool;
  ArenaAllocator arena(&pool);
  HGraphBuilder builder(&arena, &dex_compilation_unit, &dex_file, GetCompilerDriver());

  HGraph* graph = builder.BuildGraph(*code_item);
  if (graph == nullptr) {
    if (shouldCompile) {
      LOG(FATAL) << "Could not build graph in optimizing compiler";
    }
    return nullptr;
  }

  CodeGenerator* codegen = CodeGenerator::Create(&arena, graph, instruction_set);
  if (codegen == nullptr) {
    if (shouldCompile) {
      LOG(FATAL) << "Could not find code generator for optimizing compiler";
    }
    return nullptr;
  }

  HGraphVisualizer visualizer(
      visualizer_output_.get(), graph, kStringFilter, *codegen, dex_compilation_unit);
  visualizer.DumpGraph("builder");

  CodeVectorAllocator allocator;

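  // Three cases follow: if optimizations are enabled and registers can be allocated
  // for this graph, compile the optimized version; if the method was explicitly
  // marked for register allocation but allocation is not supported here, fail loudly;
  // otherwise fall back to baseline code generation.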
  if (run_optimizations_ && RegisterAllocator::CanAllocateRegistersFor(*graph, instruction_set)) {
    optimized_compiled_methods_++;
    graph->BuildDominatorTree();
    graph->TransformToSSA();
    visualizer.DumpGraph("ssa");
    graph->FindNaturalLoops();

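    // Remove redundant and dead phis left over from the SSA transformation before
    // running liveness analysis and the register allocator.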
    SsaRedundantPhiElimination(graph).Run();
    SsaDeadPhiElimination(graph).Run();

    SsaLivenessAnalysis liveness(*graph, codegen);
    liveness.Analyze();
    visualizer.DumpGraph(kLivenessPassName);

    RegisterAllocator register_allocator(graph->GetArena(), codegen, liveness);
    register_allocator.AllocateRegisters();

    visualizer.DumpGraph(kRegisterAllocatorPassName);
    codegen->CompileOptimized(&allocator);

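    // The optimized path encodes its method metadata as a stack map; the baseline
    // path below builds a vmap table and a native GC map instead.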
    std::vector<uint8_t> mapping_table;
    SrcMap src_mapping_table;
    codegen->BuildMappingTable(&mapping_table,
        GetCompilerDriver()->GetCompilerOptions().GetIncludeDebugSymbols() ?
            &src_mapping_table : nullptr);

    std::vector<uint8_t> stack_map;
    codegen->BuildStackMaps(&stack_map);

    return new CompiledMethod(GetCompilerDriver(),
                              instruction_set,
                              allocator.GetMemory(),
                              codegen->GetFrameSize(),
                              codegen->GetCoreSpillMask(),
                              0, /* FPR spill mask, unused */
                              mapping_table,
                              stack_map);
  } else if (shouldOptimize && RegisterAllocator::Supports(instruction_set)) {
    LOG(FATAL) << "Could not allocate registers in optimizing compiler";
    return nullptr;
  } else {
    unoptimized_compiled_methods_++;
    codegen->CompileBaseline(&allocator);

    // Run these phases to get some test coverage.
    graph->BuildDominatorTree();
    graph->TransformToSSA();
    visualizer.DumpGraph("ssa");
    graph->FindNaturalLoops();
    SsaLivenessAnalysis liveness(*graph, codegen);
    liveness.Analyze();
    visualizer.DumpGraph(kLivenessPassName);

    std::vector<uint8_t> mapping_table;
    SrcMap src_mapping_table;
    codegen->BuildMappingTable(&mapping_table,
        GetCompilerDriver()->GetCompilerOptions().GetIncludeDebugSymbols() ?
            &src_mapping_table : nullptr);
    std::vector<uint8_t> vmap_table;
    codegen->BuildVMapTable(&vmap_table);
    std::vector<uint8_t> gc_map;
    codegen->BuildNativeGCMap(&gc_map, dex_compilation_unit);

    return new CompiledMethod(GetCompilerDriver(),
                              instruction_set,
                              allocator.GetMemory(),
                              codegen->GetFrameSize(),
                              codegen->GetCoreSpillMask(),
                              0, /* FPR spill mask, unused */
                              &src_mapping_table,
                              mapping_table,
                              vmap_table,
                              gc_map,
                              nullptr);
  }
}

CompiledMethod* OptimizingCompiler::Compile(const DexFile::CodeItem* code_item,
                                            uint32_t access_flags,
                                            InvokeType invoke_type,
                                            uint16_t class_def_idx,
                                            uint32_t method_idx,
                                            jobject class_loader,
                                            const DexFile& dex_file) const {
  CompiledMethod* method = TryCompile(code_item, access_flags, invoke_type, class_def_idx,
                                      method_idx, class_loader, dex_file);
  if (method != nullptr) {
    return method;
  }

  return delegate_->Compile(code_item, access_flags, invoke_type, class_def_idx, method_idx,
                            class_loader, dex_file);
}

Compiler* CreateOptimizingCompiler(CompilerDriver* driver) {
  return new OptimizingCompiler(driver);
}

}  // namespace art