/*
 * Copyright (C) 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "optimizing_compiler.h"

#include <fstream>
#include <stdint.h>

#ifdef ART_ENABLE_CODEGEN_arm64
#include "instruction_simplifier_arm64.h"
#endif

#ifdef ART_ENABLE_CODEGEN_x86
#include "pc_relative_fixups_x86.h"
#endif

#include "art_method-inl.h"
#include "base/arena_allocator.h"
#include "base/arena_containers.h"
#include "base/dumpable.h"
#include "base/macros.h"
#include "base/timing_logger.h"
#include "boolean_simplifier.h"
#include "bounds_check_elimination.h"
#include "builder.h"
#include "code_generator.h"
#include "compiled_method.h"
#include "compiler.h"
#include "constant_folding.h"
#include "dead_code_elimination.h"
#include "dex/quick/dex_file_to_method_inliner_map.h"
#include "dex/verified_method.h"
#include "dex/verification_results.h"
#include "driver/compiler_driver.h"
#include "driver/compiler_driver-inl.h"
#include "driver/compiler_options.h"
#include "driver/dex_compilation_unit.h"
#include "elf_writer_quick.h"
#include "graph_checker.h"
#include "graph_visualizer.h"
#include "gvn.h"
#include "induction_var_analysis.h"
#include "inliner.h"
#include "instruction_simplifier.h"
#include "intrinsics.h"
#include "jit/jit_code_cache.h"
#include "licm.h"
#include "jni/quick/jni_compiler.h"
#include "load_store_elimination.h"
#include "nodes.h"
#include "prepare_for_register_allocation.h"
#include "reference_type_propagation.h"
#include "register_allocator.h"
#include "sharpening.h"
#include "side_effects_analysis.h"
#include "ssa_builder.h"
#include "ssa_phi_elimination.h"
#include "ssa_liveness_analysis.h"
#include "utils/assembler.h"
#include "verifier/method_verifier.h"

namespace art {

/**
 * Used by the code generator to allocate the code in a vector.
 */
class CodeVectorAllocator FINAL : public CodeAllocator {
 public:
  explicit CodeVectorAllocator(ArenaAllocator* arena)
      : memory_(arena->Adapter(kArenaAllocCodeBuffer)),
        size_(0) {}

  virtual uint8_t* Allocate(size_t size) {
    size_ = size;
    memory_.resize(size);
    return &memory_[0];
  }

  size_t GetSize() const { return size_; }
  const ArenaVector<uint8_t>& GetMemory() const { return memory_; }

 private:
  ArenaVector<uint8_t> memory_;
  size_t size_;

  DISALLOW_COPY_AND_ASSIGN(CodeVectorAllocator);
};

/**
 * Filter to apply to the visualizer. Methods whose name contains this filter
 * will be dumped.
 */
static constexpr const char kStringFilter[] = "";

class PassScope;

class PassObserver : public ValueObject {
 public:
  PassObserver(HGraph* graph,
               const char* method_name,
               CodeGenerator* codegen,
               std::ostream* visualizer_output,
               CompilerDriver* compiler_driver)
      : graph_(graph),
        method_name_(method_name),
        timing_logger_enabled_(compiler_driver->GetDumpPasses()),
        timing_logger_(method_name, true, true),
        disasm_info_(graph->GetArena()),
        visualizer_enabled_(!compiler_driver->GetDumpCfgFileName().empty()),
        visualizer_(visualizer_output, graph, *codegen),
        graph_in_bad_state_(false) {
    if (timing_logger_enabled_ || visualizer_enabled_) {
      if (!IsVerboseMethod(compiler_driver, method_name)) {
        timing_logger_enabled_ = visualizer_enabled_ = false;
      }
      if (visualizer_enabled_) {
        visualizer_.PrintHeader(method_name_);
        codegen->SetDisassemblyInformation(&disasm_info_);
      }
    }
  }

  ~PassObserver() {
    if (timing_logger_enabled_) {
      LOG(INFO) << "TIMINGS " << method_name_;
      LOG(INFO) << Dumpable<TimingLogger>(timing_logger_);
    }
  }

  void DumpDisassembly() const {
    if (visualizer_enabled_) {
      visualizer_.DumpGraphWithDisassembly();
    }
  }

  void SetGraphInBadState() { graph_in_bad_state_ = true; }

 private:
  void StartPass(const char* pass_name) {
    // Dump graph first, then start timer.
    if (visualizer_enabled_) {
      visualizer_.DumpGraph(pass_name, /* is_after_pass */ false, graph_in_bad_state_);
    }
    if (timing_logger_enabled_) {
      timing_logger_.StartTiming(pass_name);
    }
  }

  void EndPass(const char* pass_name) {
    // Pause timer first, then dump graph.
    if (timing_logger_enabled_) {
      timing_logger_.EndTiming();
    }
    if (visualizer_enabled_) {
      visualizer_.DumpGraph(pass_name, /* is_after_pass */ true, graph_in_bad_state_);
    }

    // Validate the HGraph if running in debug mode.
    if (kIsDebugBuild) {
      if (!graph_in_bad_state_) {
        if (graph_->IsInSsaForm()) {
          SSAChecker checker(graph_);
          checker.Run();
          if (!checker.IsValid()) {
            LOG(FATAL) << "Error after " << pass_name << ": " << Dumpable<SSAChecker>(checker);
          }
        } else {
          GraphChecker checker(graph_);
          checker.Run();
          if (!checker.IsValid()) {
            LOG(FATAL) << "Error after " << pass_name << ": " << Dumpable<GraphChecker>(checker);
          }
        }
      }
    }
  }

  static bool IsVerboseMethod(CompilerDriver* compiler_driver, const char* method_name) {
    // Test an exact match to --verbose-methods. If verbose-methods is set, this overrides an
    // empty kStringFilter matching all methods.
    if (compiler_driver->GetCompilerOptions().HasVerboseMethods()) {
      return compiler_driver->GetCompilerOptions().IsVerboseMethod(method_name);
    }

    // Test the kStringFilter sub-string. constexpr helper variable to silence unreachable-code
    // warning when the string is empty.
    constexpr bool kStringFilterEmpty = arraysize(kStringFilter) <= 1;
    if (kStringFilterEmpty || strstr(method_name, kStringFilter) != nullptr) {
      return true;
    }

    return false;
  }

  HGraph* const graph_;
  const char* method_name_;

  bool timing_logger_enabled_;
  TimingLogger timing_logger_;

  DisassemblyInformation disasm_info_;

  bool visualizer_enabled_;
  HGraphVisualizer visualizer_;

  // Flag to be set by the compiler if the pass failed and the graph is not
  // expected to validate.
  bool graph_in_bad_state_;

  friend PassScope;

  DISALLOW_COPY_AND_ASSIGN(PassObserver);
};

class PassScope : public ValueObject {
 public:
  PassScope(const char *pass_name, PassObserver* pass_observer)
      : pass_name_(pass_name),
        pass_observer_(pass_observer) {
    pass_observer_->StartPass(pass_name_);
  }

  ~PassScope() {
    pass_observer_->EndPass(pass_name_);
  }

 private:
  const char* const pass_name_;
  PassObserver* const pass_observer_;
};

class OptimizingCompiler FINAL : public Compiler {
 public:
  explicit OptimizingCompiler(CompilerDriver* driver);
  ~OptimizingCompiler();

  bool CanCompileMethod(uint32_t method_idx, const DexFile& dex_file, CompilationUnit* cu) const
      OVERRIDE;

  CompiledMethod* Compile(const DexFile::CodeItem* code_item,
                          uint32_t access_flags,
                          InvokeType invoke_type,
                          uint16_t class_def_idx,
                          uint32_t method_idx,
                          jobject class_loader,
                          const DexFile& dex_file,
                          Handle<mirror::DexCache> dex_cache) const OVERRIDE;

  CompiledMethod* JniCompile(uint32_t access_flags,
                             uint32_t method_idx,
                             const DexFile& dex_file) const OVERRIDE {
    return ArtQuickJniCompileMethod(GetCompilerDriver(), access_flags, method_idx, dex_file);
  }

  uintptr_t GetEntryPointOf(ArtMethod* method) const OVERRIDE
      SHARED_REQUIRES(Locks::mutator_lock_) {
    return reinterpret_cast<uintptr_t>(method->GetEntryPointFromQuickCompiledCodePtrSize(
        InstructionSetPointerSize(GetCompilerDriver()->GetInstructionSet())));
  }

  void InitCompilationUnit(CompilationUnit& cu) const OVERRIDE;

  void Init() OVERRIDE;

  void UnInit() const OVERRIDE;

  void MaybeRecordStat(MethodCompilationStat compilation_stat) const {
    if (compilation_stats_.get() != nullptr) {
      compilation_stats_->RecordStat(compilation_stat);
    }
  }

  bool JitCompile(Thread* self, jit::JitCodeCache* code_cache, ArtMethod* method)
      OVERRIDE
      SHARED_REQUIRES(Locks::mutator_lock_);

 private:
  // Whether we should run any optimization or register allocation. If false,
  // we just run code generation after the graph has been built.
  const bool run_optimizations_;

  // Create a 'CompiledMethod' for an optimized graph.
  CompiledMethod* EmitOptimized(ArenaAllocator* arena,
                                CodeVectorAllocator* code_allocator,
                                CodeGenerator* codegen,
                                CompilerDriver* driver) const;

  // Create a 'CompiledMethod' for a non-optimized graph.
  CompiledMethod* EmitBaseline(ArenaAllocator* arena,
                               CodeVectorAllocator* code_allocator,
                               CodeGenerator* codegen,
                               CompilerDriver* driver) const;

  // Try compiling a method and return the code generator used for
  // compiling it.
  // This method:
  // 1) Builds the graph. Returns null if it failed to build it.
  // 2) If `run_optimizations_` is set:
  //    2.1) Transforms the graph to SSA. Returns null if it failed.
  //    2.2) Runs optimizations on the graph, including the register allocator.
  // 3) Generates code with the `code_allocator` provided.
  CodeGenerator* TryCompile(ArenaAllocator* arena,
                            CodeVectorAllocator* code_allocator,
                            const DexFile::CodeItem* code_item,
                            uint32_t access_flags,
                            InvokeType invoke_type,
                            uint16_t class_def_idx,
                            uint32_t method_idx,
                            jobject class_loader,
                            const DexFile& dex_file,
                            Handle<mirror::DexCache> dex_cache) const;

  std::unique_ptr<OptimizingCompilerStats> compilation_stats_;

  std::unique_ptr<std::ostream> visualizer_output_;

  DISALLOW_COPY_AND_ASSIGN(OptimizingCompiler);
};

static const int kMaximumCompilationTimeBeforeWarning = 100; /* ms */

OptimizingCompiler::OptimizingCompiler(CompilerDriver* driver)
    : Compiler(driver, kMaximumCompilationTimeBeforeWarning),
      run_optimizations_(
          driver->GetCompilerOptions().GetCompilerFilter() != CompilerOptions::kTime) {}

void OptimizingCompiler::Init() {
  // Enable C1visualizer output. Must be done in Init() because the compiler
  // driver is not fully initialized when passed to the compiler's constructor.
  CompilerDriver* driver = GetCompilerDriver();
  const std::string cfg_file_name = driver->GetDumpCfgFileName();
  if (!cfg_file_name.empty()) {
    CHECK_EQ(driver->GetThreadCount(), 1U)
        << "Graph visualizer requires the compiler to run single-threaded. "
        << "Invoke the compiler with '-j1'.";
    std::ios_base::openmode cfg_file_mode =
        driver->GetDumpCfgAppend() ? std::ofstream::app : std::ofstream::out;
    visualizer_output_.reset(new std::ofstream(cfg_file_name, cfg_file_mode));
  }
  if (driver->GetDumpStats()) {
    compilation_stats_.reset(new OptimizingCompilerStats());
  }
}

void OptimizingCompiler::UnInit() const {
}

OptimizingCompiler::~OptimizingCompiler() {
  if (compilation_stats_.get() != nullptr) {
    compilation_stats_->Log();
  }
}

void OptimizingCompiler::InitCompilationUnit(CompilationUnit& cu ATTRIBUTE_UNUSED) const {
}

bool OptimizingCompiler::CanCompileMethod(uint32_t method_idx ATTRIBUTE_UNUSED,
                                          const DexFile& dex_file ATTRIBUTE_UNUSED,
                                          CompilationUnit* cu ATTRIBUTE_UNUSED) const {
  return true;
}

static bool IsInstructionSetSupported(InstructionSet instruction_set) {
  return (instruction_set == kArm && !kArm32QuickCodeUseSoftFloat)
      || instruction_set == kArm64
      || (instruction_set == kThumb2 && !kArm32QuickCodeUseSoftFloat)
      || instruction_set == kMips
      || instruction_set == kMips64
      || instruction_set == kX86
      || instruction_set == kX86_64;
}

static void RunOptimizations(HOptimization* optimizations[],
                             size_t length,
                             PassObserver* pass_observer) {
  for (size_t i = 0; i < length; ++i) {
    PassScope scope(optimizations[i]->GetPassName(), pass_observer);
    optimizations[i]->Run();
  }
}

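// Runs the inliner when the compiler options allow inlining, followed by
// another reference type propagation pass to exploit the newly inlined code.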
static void MaybeRunInliner(HGraph* graph,
                            CodeGenerator* codegen,
                            CompilerDriver* driver,
                            OptimizingCompilerStats* stats,
                            const DexCompilationUnit& dex_compilation_unit,
                            PassObserver* pass_observer,
                            StackHandleScopeCollection* handles) {
  const CompilerOptions& compiler_options = driver->GetCompilerOptions();
  bool should_inline = (compiler_options.GetInlineDepthLimit() > 0)
      && (compiler_options.GetInlineMaxCodeUnits() > 0);
  if (!should_inline) {
    return;
  }

  ArenaAllocator* arena = graph->GetArena();
  HInliner* inliner = new (arena) HInliner(
      graph, codegen, dex_compilation_unit, dex_compilation_unit, driver, handles, stats);
  ReferenceTypePropagation* type_propagation =
      new (arena) ReferenceTypePropagation(graph, handles,
          "reference_type_propagation_after_inlining");

  HOptimization* optimizations[] = {
    inliner,
    // Run another type propagation phase: inlining will open up more opportunities
    // to remove checkcast/instanceof and null checks.
    type_propagation,
  };

  RunOptimizations(optimizations, arraysize(optimizations), pass_observer);
}

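// Runs architecture-specific optimizations: the arm64 instruction simplifier
// (plus a GVN pass) or the x86 PC-relative fixups, depending on the target ISA.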
static void RunArchOptimizations(InstructionSet instruction_set,
                                 HGraph* graph,
                                 OptimizingCompilerStats* stats,
                                 PassObserver* pass_observer) {
  ArenaAllocator* arena = graph->GetArena();
  switch (instruction_set) {
#ifdef ART_ENABLE_CODEGEN_arm64
    case kArm64: {
      arm64::InstructionSimplifierArm64* simplifier =
          new (arena) arm64::InstructionSimplifierArm64(graph, stats);
      SideEffectsAnalysis* side_effects = new (arena) SideEffectsAnalysis(graph);
      GVNOptimization* gvn = new (arena) GVNOptimization(graph, *side_effects, "GVN_after_arch");
      HOptimization* arm64_optimizations[] = {
        simplifier,
        side_effects,
        gvn
      };
      RunOptimizations(arm64_optimizations, arraysize(arm64_optimizations), pass_observer);
      break;
    }
#endif
#ifdef ART_ENABLE_CODEGEN_x86
    case kX86: {
      x86::PcRelativeFixups* pc_relative_fixups = new (arena) x86::PcRelativeFixups(graph, stats);
      HOptimization* x86_optimizations[] = {
        pc_relative_fixups
      };
      RunOptimizations(x86_optimizations, arraysize(x86_optimizations), pass_observer);
      break;
    }
#endif
    default:
      break;
  }
}

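// Prepares the graph for register allocation, then runs liveness analysis and
// the register allocator, reporting each step to the pass observer.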
NO_INLINE  // Avoid increasing caller's frame size by large stack-allocated objects.
static void AllocateRegisters(HGraph* graph,
                              CodeGenerator* codegen,
                              PassObserver* pass_observer) {
  PrepareForRegisterAllocation(graph).Run();
  SsaLivenessAnalysis liveness(graph, codegen);
  {
    PassScope scope(SsaLivenessAnalysis::kLivenessPassName, pass_observer);
    liveness.Analyze();
  }
  {
    PassScope scope(RegisterAllocator::kRegisterAllocatorPassName, pass_observer);
    RegisterAllocator(graph->GetArena(), codegen, liveness).AllocateRegisters();
  }
}

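// Builds and runs the main optimization pipeline on `graph`: intrinsics
// recognition, simplification, constant folding, sharpening, inlining, GVN,
// LICM, induction variable analysis, BCE, LSE and dead code elimination,
// followed by the architecture-specific passes and register allocation.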
static void RunOptimizations(HGraph* graph,
                             CodeGenerator* codegen,
                             CompilerDriver* driver,
                             OptimizingCompilerStats* stats,
                             const DexCompilationUnit& dex_compilation_unit,
                             PassObserver* pass_observer) {
  ScopedObjectAccess soa(Thread::Current());
  StackHandleScopeCollection handles(soa.Self());
  ScopedThreadSuspension sts(soa.Self(), kNative);

  ArenaAllocator* arena = graph->GetArena();
  HDeadCodeElimination* dce1 = new (arena) HDeadCodeElimination(
      graph, stats, HDeadCodeElimination::kInitialDeadCodeEliminationPassName);
  HDeadCodeElimination* dce2 = new (arena) HDeadCodeElimination(
      graph, stats, HDeadCodeElimination::kFinalDeadCodeEliminationPassName);
  HConstantFolding* fold1 = new (arena) HConstantFolding(graph);
  InstructionSimplifier* simplify1 = new (arena) InstructionSimplifier(graph, stats);
  HBooleanSimplifier* boolean_simplify = new (arena) HBooleanSimplifier(graph);
  HConstantFolding* fold2 = new (arena) HConstantFolding(graph, "constant_folding_after_inlining");
  SideEffectsAnalysis* side_effects = new (arena) SideEffectsAnalysis(graph);
  GVNOptimization* gvn = new (arena) GVNOptimization(graph, *side_effects);
  LICM* licm = new (arena) LICM(graph, *side_effects);
  LoadStoreElimination* lse = new (arena) LoadStoreElimination(graph, *side_effects);
  HInductionVarAnalysis* induction = new (arena) HInductionVarAnalysis(graph);
  BoundsCheckElimination* bce = new (arena) BoundsCheckElimination(graph, induction);
  ReferenceTypePropagation* type_propagation =
      new (arena) ReferenceTypePropagation(graph, &handles);
  HSharpening* sharpening = new (arena) HSharpening(graph, codegen, dex_compilation_unit, driver);
  InstructionSimplifier* simplify2 = new (arena) InstructionSimplifier(
      graph, stats, "instruction_simplifier_after_types");
  InstructionSimplifier* simplify3 = new (arena) InstructionSimplifier(
      graph, stats, "instruction_simplifier_after_bce");
  InstructionSimplifier* simplify4 = new (arena) InstructionSimplifier(
      graph, stats, "instruction_simplifier_before_codegen");

  IntrinsicsRecognizer* intrinsics = new (arena) IntrinsicsRecognizer(graph, driver);

  HOptimization* optimizations1[] = {
    intrinsics,
    fold1,
    simplify1,
    type_propagation,
    sharpening,
    dce1,
    simplify2
  };

  RunOptimizations(optimizations1, arraysize(optimizations1), pass_observer);

  MaybeRunInliner(graph, codegen, driver, stats, dex_compilation_unit, pass_observer, &handles);

  // TODO: Update passes incompatible with try/catch so we have the same
  // pipeline for all methods.
  if (graph->HasTryCatch()) {
    HOptimization* optimizations2[] = {
      side_effects,
      gvn,
      dce2,
      // The codegen has a few assumptions that only the instruction simplifier
      // can satisfy. For example, the code generator does not expect to see a
      // HTypeConversion from a type to the same type.
      simplify4,
    };

    RunOptimizations(optimizations2, arraysize(optimizations2), pass_observer);
  } else {
    HOptimization* optimizations2[] = {
      // BooleanSimplifier depends on the InstructionSimplifier removing
      // redundant suspend checks to recognize empty blocks.
      boolean_simplify,
      fold2,  // TODO: if we don't inline we can also skip fold2.
      side_effects,
      gvn,
      licm,
      induction,
      bce,
      simplify3,
      lse,
      dce2,
      // The codegen has a few assumptions that only the instruction simplifier
      // can satisfy. For example, the code generator does not expect to see a
      // HTypeConversion from a type to the same type.
      simplify4,
    };

    RunOptimizations(optimizations2, arraysize(optimizations2), pass_observer);
  }

  RunArchOptimizations(driver->GetInstructionSet(), graph, stats, pass_observer);
  AllocateRegisters(graph, codegen, pass_observer);
}

// The stack map we generate must be 4-byte aligned on ARM. Since existing
// maps are generated alongside these stack maps, we must also align them.
static ArrayRef<const uint8_t> AlignVectorSize(ArenaVector<uint8_t>& vector) {
  size_t size = vector.size();
  size_t aligned_size = RoundUp(size, 4);
  for (; size < aligned_size; ++size) {
    vector.push_back(0);
  }
  return ArrayRef<const uint8_t>(vector);
}

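// Collects the linker patches produced by the code generator and returns them
// sorted by literal offset.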
static ArenaVector<LinkerPatch> EmitAndSortLinkerPatches(CodeGenerator* codegen) {
  ArenaVector<LinkerPatch> linker_patches(codegen->GetGraph()->GetArena()->Adapter());
  codegen->EmitLinkerPatches(&linker_patches);

  // Sort patches by literal offset. Required for .oat_patches encoding.
  std::sort(linker_patches.begin(), linker_patches.end(),
            [](const LinkerPatch& lhs, const LinkerPatch& rhs) {
              return lhs.LiteralOffset() < rhs.LiteralOffset();
            });

  return linker_patches;
}

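// Wraps the generated code, its stack map and linker patches into a
// CompiledMethod for an optimized (non-baseline) compilation.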
CompiledMethod* OptimizingCompiler::EmitOptimized(ArenaAllocator* arena,
                                                  CodeVectorAllocator* code_allocator,
                                                  CodeGenerator* codegen,
                                                  CompilerDriver* compiler_driver) const {
  ArenaVector<LinkerPatch> linker_patches = EmitAndSortLinkerPatches(codegen);
  ArenaVector<uint8_t> stack_map(arena->Adapter(kArenaAllocStackMaps));
  stack_map.resize(codegen->ComputeStackMapsSize());
  codegen->BuildStackMaps(MemoryRegion(stack_map.data(), stack_map.size()));

  MaybeRecordStat(MethodCompilationStat::kCompiledOptimized);

  CompiledMethod* compiled_method = CompiledMethod::SwapAllocCompiledMethod(
      compiler_driver,
      codegen->GetInstructionSet(),
      ArrayRef<const uint8_t>(code_allocator->GetMemory()),
      // Follow Quick's behavior and set the frame size to zero if it is
      // considered "empty" (see the definition of
      // art::CodeGenerator::HasEmptyFrame).
      codegen->HasEmptyFrame() ? 0 : codegen->GetFrameSize(),
      codegen->GetCoreSpillMask(),
      codegen->GetFpuSpillMask(),
      ArrayRef<const SrcMapElem>(codegen->GetSrcMappingTable()),
      ArrayRef<const uint8_t>(),  // mapping_table.
      ArrayRef<const uint8_t>(stack_map),
      ArrayRef<const uint8_t>(),  // native_gc_map.
      ArrayRef<const uint8_t>(*codegen->GetAssembler()->cfi().data()),
      ArrayRef<const LinkerPatch>(linker_patches));

  return compiled_method;
}

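// Baseline counterpart of EmitOptimized(): emits mapping table, vmap table and
// native GC map instead of a stack map.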
CompiledMethod* OptimizingCompiler::EmitBaseline(
    ArenaAllocator* arena,
    CodeVectorAllocator* code_allocator,
    CodeGenerator* codegen,
    CompilerDriver* compiler_driver) const {
  ArenaVector<LinkerPatch> linker_patches = EmitAndSortLinkerPatches(codegen);

  ArenaVector<uint8_t> mapping_table(arena->Adapter(kArenaAllocBaselineMaps));
  codegen->BuildMappingTable(&mapping_table);
  ArenaVector<uint8_t> vmap_table(arena->Adapter(kArenaAllocBaselineMaps));
  codegen->BuildVMapTable(&vmap_table);
  ArenaVector<uint8_t> gc_map(arena->Adapter(kArenaAllocBaselineMaps));
  codegen->BuildNativeGCMap(&gc_map, *compiler_driver);

  MaybeRecordStat(MethodCompilationStat::kCompiledBaseline);
  CompiledMethod* compiled_method = CompiledMethod::SwapAllocCompiledMethod(
      compiler_driver,
      codegen->GetInstructionSet(),
      ArrayRef<const uint8_t>(code_allocator->GetMemory()),
      // Follow Quick's behavior and set the frame size to zero if it is
      // considered "empty" (see the definition of
      // art::CodeGenerator::HasEmptyFrame).
      codegen->HasEmptyFrame() ? 0 : codegen->GetFrameSize(),
      codegen->GetCoreSpillMask(),
      codegen->GetFpuSpillMask(),
      ArrayRef<const SrcMapElem>(codegen->GetSrcMappingTable()),
      AlignVectorSize(mapping_table),
      AlignVectorSize(vmap_table),
      AlignVectorSize(gc_map),
      ArrayRef<const uint8_t>(*codegen->GetAssembler()->cfi().data()),
      ArrayRef<const LinkerPatch>(linker_patches));
  return compiled_method;
}

CodeGenerator* OptimizingCompiler::TryCompile(ArenaAllocator* arena,
                                              CodeVectorAllocator* code_allocator,
                                              const DexFile::CodeItem* code_item,
                                              uint32_t access_flags,
                                              InvokeType invoke_type,
                                              uint16_t class_def_idx,
                                              uint32_t method_idx,
                                              jobject class_loader,
                                              const DexFile& dex_file,
                                              Handle<mirror::DexCache> dex_cache) const {
  std::string method_name = PrettyMethod(method_idx, dex_file);
  MaybeRecordStat(MethodCompilationStat::kAttemptCompilation);
  CompilerDriver* compiler_driver = GetCompilerDriver();
  InstructionSet instruction_set = compiler_driver->GetInstructionSet();

  // Always use the thumb2 assembler: some runtime functionality (like implicit stack
  // overflow checks) assumes thumb2.
  if (instruction_set == kArm) {
    instruction_set = kThumb2;
  }

  // Do not attempt to compile on architectures we do not support.
  if (!IsInstructionSetSupported(instruction_set)) {
    MaybeRecordStat(MethodCompilationStat::kNotCompiledUnsupportedIsa);
    return nullptr;
  }

  if (Compiler::IsPathologicalCase(*code_item, method_idx, dex_file)) {
    MaybeRecordStat(MethodCompilationStat::kNotCompiledPathological);
    return nullptr;
  }

  // Implementation of the space filter: do not compile a code item whose size in
  // code units is bigger than 128.
  static constexpr size_t kSpaceFilterOptimizingThreshold = 128;
  const CompilerOptions& compiler_options = compiler_driver->GetCompilerOptions();
  if ((compiler_options.GetCompilerFilter() == CompilerOptions::kSpace)
      && (code_item->insns_size_in_code_units_ > kSpaceFilterOptimizingThreshold)) {
    MaybeRecordStat(MethodCompilationStat::kNotCompiledSpaceFilter);
    return nullptr;
  }

  DexCompilationUnit dex_compilation_unit(
      nullptr, class_loader, Runtime::Current()->GetClassLinker(), dex_file, code_item,
      class_def_idx, method_idx, access_flags,
      compiler_driver->GetVerifiedMethod(&dex_file, method_idx), dex_cache);

  bool requires_barrier = dex_compilation_unit.IsConstructor()
      && compiler_driver->RequiresConstructorBarrier(Thread::Current(),
                                                     dex_compilation_unit.GetDexFile(),
                                                     dex_compilation_unit.GetClassDefIndex());
  HGraph* graph = new (arena) HGraph(
      arena, dex_file, method_idx, requires_barrier, compiler_driver->GetInstructionSet(),
      kInvalidInvokeType, compiler_driver->GetCompilerOptions().GetDebuggable());

  std::unique_ptr<CodeGenerator> codegen(
      CodeGenerator::Create(graph,
                            instruction_set,
                            *compiler_driver->GetInstructionSetFeatures(),
                            compiler_driver->GetCompilerOptions()));
  if (codegen.get() == nullptr) {
    MaybeRecordStat(MethodCompilationStat::kNotCompiledNoCodegen);
    return nullptr;
  }
  codegen->GetAssembler()->cfi().SetEnabled(
      compiler_driver->GetCompilerOptions().GetGenerateDebugInfo());

  PassObserver pass_observer(graph,
                             method_name.c_str(),
                             codegen.get(),
                             visualizer_output_.get(),
                             compiler_driver);

  const uint8_t* interpreter_metadata = nullptr;
  {
    ScopedObjectAccess soa(Thread::Current());
    StackHandleScope<1> hs(soa.Self());
    Handle<mirror::ClassLoader> loader(hs.NewHandle(
        soa.Decode<mirror::ClassLoader*>(class_loader)));
    ArtMethod* art_method = compiler_driver->ResolveMethod(
        soa, dex_cache, loader, &dex_compilation_unit, method_idx, invoke_type);
    // We may not get a method, for example if its class is erroneous.
    // TODO: Clean this up, the compiler driver should just pass the ArtMethod to compile.
    if (art_method != nullptr) {
      interpreter_metadata = art_method->GetQuickenedInfo();
    }
  }
  HGraphBuilder builder(graph,
                        &dex_compilation_unit,
                        &dex_compilation_unit,
                        &dex_file,
                        compiler_driver,
                        compilation_stats_.get(),
                        interpreter_metadata,
                        dex_cache);

  VLOG(compiler) << "Building " << method_name;

  {
    PassScope scope(HGraphBuilder::kBuilderPassName, &pass_observer);
    if (!builder.BuildGraph(*code_item)) {
      pass_observer.SetGraphInBadState();
      return nullptr;
    }
  }

  VLOG(compiler) << "Optimizing " << method_name;
  if (run_optimizations_) {
    {
      PassScope scope(SsaBuilder::kSsaBuilderPassName, &pass_observer);
      if (!graph->TryBuildingSsa()) {
        // We could not transform the graph to SSA; bail out.
        LOG(INFO) << "Skipping compilation of " << method_name << ": it contains a non natural loop";
        MaybeRecordStat(MethodCompilationStat::kNotCompiledCannotBuildSSA);
        pass_observer.SetGraphInBadState();
        return nullptr;
      }
    }

    RunOptimizations(graph,
                     codegen.get(),
                     compiler_driver,
                     compilation_stats_.get(),
                     dex_compilation_unit,
                     &pass_observer);
    codegen->CompileOptimized(code_allocator);
  } else {
    codegen->CompileBaseline(code_allocator);
  }
  pass_observer.DumpDisassembly();

  if (kArenaAllocatorCountAllocations) {
    if (arena->BytesAllocated() > 4 * MB) {
      MemStats mem_stats(arena->GetMemStats());
      LOG(INFO) << PrettyMethod(method_idx, dex_file) << " " << Dumpable<MemStats>(mem_stats);
    }
  }

  return codegen.release();
}

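// Returns whether the verification failures encountered on this method are
// limited to unresolved or access errors that the compiler can handle through
// the unresolved helpers.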
static bool CanHandleVerificationFailure(const VerifiedMethod* verified_method) {
  // For access errors the compiler will use the unresolved helpers (e.g. HInvokeUnresolved).
  uint32_t unresolved_mask = verifier::VerifyError::VERIFY_ERROR_NO_CLASS
      | verifier::VerifyError::VERIFY_ERROR_ACCESS_CLASS
      | verifier::VerifyError::VERIFY_ERROR_ACCESS_FIELD
      | verifier::VerifyError::VERIFY_ERROR_ACCESS_METHOD;
  return (verified_method->GetEncounteredVerificationFailures() & (~unresolved_mask)) == 0;
}

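// AOT entry point: compiles the method with TryCompile() if its verification
// status allows it, then emits either an optimized or a baseline CompiledMethod.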
CompiledMethod* OptimizingCompiler::Compile(const DexFile::CodeItem* code_item,
                                            uint32_t access_flags,
                                            InvokeType invoke_type,
                                            uint16_t class_def_idx,
                                            uint32_t method_idx,
                                            jobject jclass_loader,
                                            const DexFile& dex_file,
                                            Handle<mirror::DexCache> dex_cache) const {
  CompilerDriver* compiler_driver = GetCompilerDriver();
  CompiledMethod* method = nullptr;
  DCHECK(Runtime::Current()->IsAotCompiler());
  const VerifiedMethod* verified_method = compiler_driver->GetVerifiedMethod(&dex_file, method_idx);
  DCHECK(!verified_method->HasRuntimeThrow());
  if (compiler_driver->IsMethodVerifiedWithoutFailures(method_idx, class_def_idx, dex_file)
      || CanHandleVerificationFailure(verified_method)) {
    ArenaAllocator arena(Runtime::Current()->GetArenaPool());
    CodeVectorAllocator code_allocator(&arena);
    std::unique_ptr<CodeGenerator> codegen(
        TryCompile(&arena,
                   &code_allocator,
                   code_item,
                   access_flags,
                   invoke_type,
                   class_def_idx,
                   method_idx,
                   jclass_loader,
                   dex_file,
                   dex_cache));
    if (codegen.get() != nullptr) {
      if (run_optimizations_) {
        method = EmitOptimized(&arena, &code_allocator, codegen.get(), compiler_driver);
      } else {
        method = EmitBaseline(&arena, &code_allocator, codegen.get(), compiler_driver);
      }
    }
  } else {
    if (compiler_driver->GetCompilerOptions().VerifyAtRuntime()) {
      MaybeRecordStat(MethodCompilationStat::kNotCompiledVerifyAtRuntime);
    } else {
      MaybeRecordStat(MethodCompilationStat::kNotCompiledClassNotVerified);
    }
  }

  if (kIsDebugBuild &&
      IsCompilingWithCoreImage() &&
      IsInstructionSetSupported(compiler_driver->GetInstructionSet())) {
    // For testing purposes, we put a special marker on method names that should be compiled
    // with this compiler. This makes sure we're not regressing.
    std::string method_name = PrettyMethod(method_idx, dex_file);
    bool shouldCompile = method_name.find("$opt$") != std::string::npos;
    DCHECK((method != nullptr) || !shouldCompile) << "Didn't compile " << method_name;
  }

  return method;
}


Compiler* CreateOptimizingCompiler(CompilerDriver* driver) {
  return new OptimizingCompiler(driver);
}

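// Returns whether we are compiling against a "core" image, which is generated
// for testing purposes and enables extra consistency checks in debug builds.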
bool IsCompilingWithCoreImage() {
  const std::string& image = Runtime::Current()->GetImageLocation();
  return EndsWith(image, "core.art") || EndsWith(image, "core-optimizing.art");
}

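// JIT entry point: compiles `method` with the optimizing pipeline and commits
// the generated code and stack map to the JIT code cache. Returns false if
// compilation or code cache allocation fails.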
bool OptimizingCompiler::JitCompile(Thread* self,
                                    jit::JitCodeCache* code_cache,
                                    ArtMethod* method) {
  StackHandleScope<2> hs(self);
  Handle<mirror::ClassLoader> class_loader(hs.NewHandle(
      method->GetDeclaringClass()->GetClassLoader()));
  Handle<mirror::DexCache> dex_cache(hs.NewHandle(method->GetDexCache()));

  jobject jclass_loader = class_loader.ToJObject();
  const DexFile* dex_file = method->GetDexFile();
  const uint16_t class_def_idx = method->GetClassDefIndex();
  const DexFile::CodeItem* code_item = dex_file->GetCodeItem(method->GetCodeItemOffset());
  const uint32_t method_idx = method->GetDexMethodIndex();
  const uint32_t access_flags = method->GetAccessFlags();
  const InvokeType invoke_type = method->GetInvokeType();

  ArenaAllocator arena(Runtime::Current()->GetArenaPool());
  CodeVectorAllocator code_allocator(&arena);
  std::unique_ptr<CodeGenerator> codegen;
  {
    // Go to native so that we don't block GC during compilation.
    ScopedThreadSuspension sts(self, kNative);

    DCHECK(run_optimizations_);
    codegen.reset(
        TryCompile(&arena,
                   &code_allocator,
                   code_item,
                   access_flags,
                   invoke_type,
                   class_def_idx,
                   method_idx,
                   jclass_loader,
                   *dex_file,
                   dex_cache));
    if (codegen.get() == nullptr) {
      return false;
    }
  }

  size_t stack_map_size = codegen->ComputeStackMapsSize();
  uint8_t* stack_map_data = code_cache->ReserveData(self, stack_map_size);
  if (stack_map_data == nullptr) {
    return false;
  }
  codegen->BuildStackMaps(MemoryRegion(stack_map_data, stack_map_size));
  const void* code = code_cache->CommitCode(
      self,
      method,
      nullptr,
      stack_map_data,
      nullptr,
      codegen->HasEmptyFrame() ? 0 : codegen->GetFrameSize(),
      codegen->GetCoreSpillMask(),
      codegen->GetFpuSpillMask(),
      code_allocator.GetMemory().data(),
      code_allocator.GetSize());

  if (code == nullptr) {
    code_cache->ClearData(self, stack_map_data);
    return false;
  }

  return true;
}

}  // namespace art