/*
 * Copyright (C) 2011 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "dex/compiler_internals.h"
#include "driver/compiler_options.h"
#include "dex_file-inl.h"
#include "gc_map.h"
#include "gc_map_builder.h"
#include "mapping_table.h"
#include "mir_to_lir-inl.h"
#include "dex/quick/dex_file_method_inliner.h"
#include "dex/quick/dex_file_to_method_inliner_map.h"
#include "dex/verification_results.h"
#include "dex/verified_method.h"
#include "verifier/dex_gc_map.h"
#include "verifier/method_verifier.h"
#include "vmap_table.h"

namespace art {

namespace {

/* Dump a mapping table */
template <typename It>
void DumpMappingTable(const char* table_name, const char* descriptor, const char* name,
                      const Signature& signature, uint32_t size, It first) {
  if (size != 0) {
    std::string line(StringPrintf("\n  %s %s%s_%s_table[%u] = {", table_name,
                     descriptor, name, signature.ToString().c_str(), size));
    std::replace(line.begin(), line.end(), ';', '_');
    LOG(INFO) << line;
    for (uint32_t i = 0; i != size; ++i) {
      line = StringPrintf("    {0x%05x, 0x%04x},", first.NativePcOffset(), first.DexPc());
      ++first;
      LOG(INFO) << line;
    }
    LOG(INFO) << "  };\n\n";
  }
}

}  // anonymous namespace

bool Mir2Lir::IsInexpensiveConstant(RegLocation rl_src) {
  bool res = false;
  if (rl_src.is_const) {
    if (rl_src.wide) {
      // For wide registers, check whether we're the high partner. In that case we need to switch
      // to the lower one for the correct value.
      if (rl_src.high_word) {
        rl_src.high_word = false;
        rl_src.s_reg_low--;
        rl_src.orig_sreg--;
      }
      if (rl_src.fp) {
        res = InexpensiveConstantDouble(mir_graph_->ConstantValueWide(rl_src));
      } else {
        res = InexpensiveConstantLong(mir_graph_->ConstantValueWide(rl_src));
      }
    } else {
      if (rl_src.fp) {
        res = InexpensiveConstantFloat(mir_graph_->ConstantValue(rl_src));
      } else {
        res = InexpensiveConstantInt(mir_graph_->ConstantValue(rl_src));
      }
    }
  }
  return res;
}

void Mir2Lir::MarkSafepointPC(LIR* inst) {
  DCHECK(!inst->flags.use_def_invalid);
  inst->u.m.def_mask = &kEncodeAll;
  LIR* safepoint_pc = NewLIR0(kPseudoSafepointPC);
  DCHECK(safepoint_pc->u.m.def_mask->Equals(kEncodeAll));
}

void Mir2Lir::MarkSafepointPCAfter(LIR* after) {
  DCHECK(!after->flags.use_def_invalid);
  after->u.m.def_mask = &kEncodeAll;
  // As NewLIR0 uses Append, we need to create the LIR by hand.
  LIR* safepoint_pc = RawLIR(current_dalvik_offset_, kPseudoSafepointPC);
  if (after->next == nullptr) {
    DCHECK_EQ(after, last_lir_insn_);
    AppendLIR(safepoint_pc);
  } else {
    InsertLIRAfter(after, safepoint_pc);
  }
  DCHECK(safepoint_pc->u.m.def_mask->Equals(kEncodeAll));
}

/* Remove a LIR from the list. */
void Mir2Lir::UnlinkLIR(LIR* lir) {
  if (UNLIKELY(lir == first_lir_insn_)) {
    first_lir_insn_ = lir->next;
    if (lir->next != nullptr) {
      lir->next->prev = nullptr;
    } else {
      DCHECK(lir->next == nullptr);
      DCHECK(lir == last_lir_insn_);
      last_lir_insn_ = nullptr;
    }
  } else if (lir == last_lir_insn_) {
    last_lir_insn_ = lir->prev;
    lir->prev->next = nullptr;
  } else if ((lir->prev != nullptr) && (lir->next != nullptr)) {
    lir->prev->next = lir->next;
    lir->next->prev = lir->prev;
  }
}

/* Convert an instruction to a NOP */
void Mir2Lir::NopLIR(LIR* lir) {
  lir->flags.is_nop = true;
  if (!cu_->verbose) {
    UnlinkLIR(lir);
  }
}

void Mir2Lir::SetMemRefType(LIR* lir, bool is_load, int mem_type) {
  DCHECK(GetTargetInstFlags(lir->opcode) & (IS_LOAD | IS_STORE));
  DCHECK(!lir->flags.use_def_invalid);
  // TODO: Avoid the extra Arena allocation!
  const ResourceMask** mask_ptr;
  ResourceMask mask;
  if (is_load) {
    mask_ptr = &lir->u.m.use_mask;
  } else {
    mask_ptr = &lir->u.m.def_mask;
  }
  mask = **mask_ptr;
  /* Clear out the memref flags */
  mask.ClearBits(kEncodeMem);
  /* ..and then add back the one we need */
  switch (mem_type) {
    case ResourceMask::kLiteral:
      DCHECK(is_load);
      mask.SetBit(ResourceMask::kLiteral);
      break;
    case ResourceMask::kDalvikReg:
      mask.SetBit(ResourceMask::kDalvikReg);
      break;
    case ResourceMask::kHeapRef:
      mask.SetBit(ResourceMask::kHeapRef);
      break;
    case ResourceMask::kMustNotAlias:
      /* Currently only loads can be marked as kMustNotAlias */
      DCHECK(!(GetTargetInstFlags(lir->opcode) & IS_STORE));
      mask.SetBit(ResourceMask::kMustNotAlias);
      break;
    default:
      LOG(FATAL) << "Oat: invalid memref kind - " << mem_type;
  }
  *mask_ptr = mask_cache_.GetMask(mask);
}

/*
 * Mark load/store instructions that access Dalvik registers through the stack.
 */
void Mir2Lir::AnnotateDalvikRegAccess(LIR* lir, int reg_id, bool is_load,
                                      bool is64bit) {
  DCHECK((is_load ? lir->u.m.use_mask : lir->u.m.def_mask)->Intersection(kEncodeMem).Equals(
      kEncodeDalvikReg));

  /*
   * Store the Dalvik register id in alias_info. Mark the MSB if it is a 64-bit
   * access.
   */
  lir->flags.alias_info = ENCODE_ALIAS_INFO(reg_id, is64bit);
}

/*
 * Debugging macros
 */
#define DUMP_RESOURCE_MASK(X)

/* Pretty-print a LIR instruction */
void Mir2Lir::DumpLIRInsn(LIR* lir, unsigned char* base_addr) {
  int offset = lir->offset;
  int dest = lir->operands[0];
  const bool dump_nop = (cu_->enable_debug & (1 << kDebugShowNops));

  /* Handle pseudo-ops individually, and all regular insns as a group */
  switch (lir->opcode) {
    case kPseudoMethodEntry:
      LOG(INFO) << "-------- method entry "
                << PrettyMethod(cu_->method_idx, *cu_->dex_file);
      break;
    case kPseudoMethodExit:
      LOG(INFO) << "-------- Method_Exit";
      break;
    case kPseudoBarrier:
      LOG(INFO) << "-------- BARRIER";
      break;
    case kPseudoEntryBlock:
      LOG(INFO) << "-------- entry offset: 0x" << std::hex << dest;
      break;
    case kPseudoDalvikByteCodeBoundary:
      if (lir->operands[0] == 0) {
        // NOTE: only used for debug listings.
        lir->operands[0] = WrapPointer(ArenaStrdup("No instruction string"));
      }
      LOG(INFO) << "-------- dalvik offset: 0x" << std::hex
                << lir->dalvik_offset << " @ "
                << reinterpret_cast<char*>(UnwrapPointer(lir->operands[0]));
      break;
    case kPseudoExitBlock:
      LOG(INFO) << "-------- exit offset: 0x" << std::hex << dest;
      break;
    case kPseudoPseudoAlign4:
      LOG(INFO) << reinterpret_cast<uintptr_t>(base_addr) + offset << " (0x" << std::hex
                << offset << "): .align4";
      break;
    case kPseudoEHBlockLabel:
      LOG(INFO) << "Exception_Handling:";
      break;
    case kPseudoTargetLabel:
    case kPseudoNormalBlockLabel:
      LOG(INFO) << "L" << reinterpret_cast<void*>(lir) << ":";
      break;
    case kPseudoThrowTarget:
      LOG(INFO) << "LT" << reinterpret_cast<void*>(lir) << ":";
      break;
    case kPseudoIntrinsicRetry:
      LOG(INFO) << "IR" << reinterpret_cast<void*>(lir) << ":";
      break;
    case kPseudoSuspendTarget:
      LOG(INFO) << "LS" << reinterpret_cast<void*>(lir) << ":";
      break;
    case kPseudoSafepointPC:
      LOG(INFO) << "LsafepointPC_0x" << std::hex << lir->offset << "_" << lir->dalvik_offset << ":";
      break;
    case kPseudoExportedPC:
      LOG(INFO) << "LexportedPC_0x" << std::hex << lir->offset << "_" << lir->dalvik_offset << ":";
      break;
    case kPseudoCaseLabel:
      LOG(INFO) << "LC" << reinterpret_cast<void*>(lir) << ": Case target 0x"
                << std::hex << lir->operands[0] << "|" << std::dec <<
          lir->operands[0];
      break;
    default:
      if (lir->flags.is_nop && !dump_nop) {
        break;
      } else {
        std::string op_name(BuildInsnString(GetTargetInstName(lir->opcode),
                                            lir, base_addr));
        std::string op_operands(BuildInsnString(GetTargetInstFmt(lir->opcode),
                                                lir, base_addr));
        LOG(INFO) << StringPrintf("%5p: %-9s%s%s",
                                  base_addr + offset,
                                  op_name.c_str(), op_operands.c_str(),
                                  lir->flags.is_nop ? "(nop)" : "");
      }
      break;
  }

  if (lir->u.m.use_mask && (!lir->flags.is_nop || dump_nop)) {
    DUMP_RESOURCE_MASK(DumpResourceMask(lir, *lir->u.m.use_mask, "use"));
  }
  if (lir->u.m.def_mask && (!lir->flags.is_nop || dump_nop)) {
    DUMP_RESOURCE_MASK(DumpResourceMask(lir, *lir->u.m.def_mask, "def"));
  }
}

void Mir2Lir::DumpPromotionMap() {
  uint32_t num_regs = mir_graph_->GetNumOfCodeAndTempVRs();
  for (uint32_t i = 0; i < num_regs; i++) {
    PromotionMap v_reg_map = promotion_map_[i];
    std::string buf;
    if (v_reg_map.fp_location == kLocPhysReg) {
      StringAppendF(&buf, " : s%d", RegStorage::RegNum(v_reg_map.fp_reg));
    }

    std::string buf3;
    if (i < mir_graph_->GetNumOfCodeVRs()) {
      StringAppendF(&buf3, "%02d", i);
    } else if (i == mir_graph_->GetNumOfCodeVRs()) {
      buf3 = "Method*";
    } else {
      uint32_t diff = i - mir_graph_->GetNumOfCodeVRs();
      StringAppendF(&buf3, "ct%d", diff);
    }

    LOG(INFO) << StringPrintf("V[%s] -> %s%d%s", buf3.c_str(),
                              v_reg_map.core_location == kLocPhysReg ?
                              "r" : "SP+", v_reg_map.core_location == kLocPhysReg ?
                              v_reg_map.core_reg : SRegOffset(i),
                              buf.c_str());
  }
}

void Mir2Lir::UpdateLIROffsets() {
  // Only used for code listings.
  size_t offset = 0;
  for (LIR* lir = first_lir_insn_; lir != nullptr; lir = lir->next) {
    lir->offset = offset;
    if (!lir->flags.is_nop && !IsPseudoLirOp(lir->opcode)) {
      offset += GetInsnSize(lir);
    } else if (lir->opcode == kPseudoPseudoAlign4) {
      offset += (offset & 0x2);
    }
  }
}

/* Dump instructions and constant pool contents */
void Mir2Lir::CodegenDump() {
  LOG(INFO) << "Dumping LIR insns for "
            << PrettyMethod(cu_->method_idx, *cu_->dex_file);
  LIR* lir_insn;
  int insns_size = mir_graph_->GetNumDalvikInsns();

  LOG(INFO) << "Regs (excluding ins) : " << mir_graph_->GetNumOfLocalCodeVRs();
  LOG(INFO) << "Ins : " << mir_graph_->GetNumOfInVRs();
  LOG(INFO) << "Outs : " << mir_graph_->GetNumOfOutVRs();
  LOG(INFO) << "CoreSpills : " << num_core_spills_;
  LOG(INFO) << "FPSpills : " << num_fp_spills_;
  LOG(INFO) << "CompilerTemps : " << mir_graph_->GetNumUsedCompilerTemps();
  LOG(INFO) << "Frame size : " << frame_size_;
  LOG(INFO) << "code size is " << total_size_ <<
      " bytes, Dalvik size is " << insns_size * 2;
  LOG(INFO) << "expansion factor: "
            << static_cast<float>(total_size_) / static_cast<float>(insns_size * 2);
  DumpPromotionMap();
  UpdateLIROffsets();
  for (lir_insn = first_lir_insn_; lir_insn != nullptr; lir_insn = lir_insn->next) {
    DumpLIRInsn(lir_insn, 0);
  }
  for (lir_insn = literal_list_; lir_insn != nullptr; lir_insn = lir_insn->next) {
    LOG(INFO) << StringPrintf("%x (%04x): .word (%#x)", lir_insn->offset, lir_insn->offset,
                              lir_insn->operands[0]);
  }

  const DexFile::MethodId& method_id =
      cu_->dex_file->GetMethodId(cu_->method_idx);
  const Signature signature = cu_->dex_file->GetMethodSignature(method_id);
  const char* name = cu_->dex_file->GetMethodName(method_id);
  const char* descriptor(cu_->dex_file->GetMethodDeclaringClassDescriptor(method_id));

  // Dump mapping tables
  if (!encoded_mapping_table_.empty()) {
    MappingTable table(&encoded_mapping_table_[0]);
    DumpMappingTable("PC2Dex_MappingTable", descriptor, name, signature,
                     table.PcToDexSize(), table.PcToDexBegin());
    DumpMappingTable("Dex2PC_MappingTable", descriptor, name, signature,
                     table.DexToPcSize(), table.DexToPcBegin());
  }
}

/*
 * Search the existing constants in the literal pool for an exact or close match
 * within specified delta (greater or equal to 0).
 */
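// Note: the unsigned comparison below accepts any literal whose value lies in
// [value - delta, value], so a delta of 0 requests an exact match.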
LIR* Mir2Lir::ScanLiteralPool(LIR* data_target, int value, unsigned int delta) {
  while (data_target) {
    if ((static_cast<unsigned>(value - data_target->operands[0])) <= delta)
      return data_target;
    data_target = data_target->next;
  }
  return nullptr;
}

/* Search the existing constants in the literal pool for an exact wide match */
LIR* Mir2Lir::ScanLiteralPoolWide(LIR* data_target, int val_lo, int val_hi) {
  bool lo_match = false;
  LIR* lo_target = nullptr;
  while (data_target) {
    if (lo_match && (data_target->operands[0] == val_hi)) {
      // Record high word in case we need to expand this later.
      lo_target->operands[1] = val_hi;
      return lo_target;
    }
    lo_match = false;
    if (data_target->operands[0] == val_lo) {
      lo_match = true;
      lo_target = data_target;
    }
    data_target = data_target->next;
  }
  return nullptr;
}

/* Search the existing constants in the literal pool for an exact method match */
LIR* Mir2Lir::ScanLiteralPoolMethod(LIR* data_target, const MethodReference& method) {
  while (data_target) {
    if (static_cast<uint32_t>(data_target->operands[0]) == method.dex_method_index &&
        UnwrapPointer(data_target->operands[1]) == method.dex_file) {
      return data_target;
    }
    data_target = data_target->next;
  }
  return nullptr;
}

/* Search the existing constants in the literal pool for an exact class match */
LIR* Mir2Lir::ScanLiteralPoolClass(LIR* data_target, const DexFile& dex_file, uint32_t type_idx) {
  while (data_target) {
    if (static_cast<uint32_t>(data_target->operands[0]) == type_idx &&
        UnwrapPointer(data_target->operands[1]) == &dex_file) {
      return data_target;
    }
    data_target = data_target->next;
  }
  return nullptr;
}

/*
 * The following are building blocks to insert constants into the pool or
 * instruction streams.
 */

/* Add a 32-bit constant to the constant pool */
LIR* Mir2Lir::AddWordData(LIR* *constant_list_p, int value) {
  /* Add the constant to the literal pool */
  if (constant_list_p) {
    LIR* new_value = static_cast<LIR*>(arena_->Alloc(sizeof(LIR), kArenaAllocData));
    new_value->operands[0] = value;
    new_value->next = *constant_list_p;
    *constant_list_p = new_value;
    estimated_native_code_size_ += sizeof(value);
    return new_value;
  }
  return nullptr;
}

/* Add a 64-bit constant to the constant pool or mixed with code */
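// Because AddWordData() prepends to the list, pushing the high word first leaves the low
// word at the head, i.e. at the lower literal offset once offsets are assigned.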
LIR* Mir2Lir::AddWideData(LIR* *constant_list_p, int val_lo, int val_hi) {
  AddWordData(constant_list_p, val_hi);
  return AddWordData(constant_list_p, val_lo);
}

static void Push32(std::vector<uint8_t>&buf, int data) {
  buf.push_back(data & 0xff);
  buf.push_back((data >> 8) & 0xff);
  buf.push_back((data >> 16) & 0xff);
  buf.push_back((data >> 24) & 0xff);
}

/**
 * @brief Push a compressed reference which needs patching at link/patchoat-time.
 * @details This needs to be kept consistent with the code which actually does the patching in
 * oat_writer.cc and in the patchoat tool.
 */
static void PushUnpatchedReference(std::vector<uint8_t>&buf) {
  // Note that we can safely initialize the patches to zero. The code deduplication mechanism takes
  // the patches into account when determining whether two pieces of code are functionally
  // equivalent.
  Push32(buf, UINT32_C(0));
}

static void AlignBuffer(std::vector<uint8_t>&buf, size_t offset) {
  while (buf.size() < offset) {
    buf.push_back(0);
  }
}

/* Write the literal pool to the output stream */
void Mir2Lir::InstallLiteralPools() {
  AlignBuffer(code_buffer_, data_offset_);
  LIR* data_lir = literal_list_;
  while (data_lir != nullptr) {
    Push32(code_buffer_, data_lir->operands[0]);
    data_lir = NEXT_LIR(data_lir);
  }
  // TODO: patches_.reserve() as needed.
  // Push code and method literals, record offsets for the compiler to patch.
  data_lir = code_literal_list_;
  while (data_lir != nullptr) {
    uint32_t target_method_idx = data_lir->operands[0];
    const DexFile* target_dex_file =
        reinterpret_cast<const DexFile*>(UnwrapPointer(data_lir->operands[1]));
    patches_.push_back(LinkerPatch::CodePatch(code_buffer_.size(),
                                              target_dex_file, target_method_idx));
    PushUnpatchedReference(code_buffer_);
    data_lir = NEXT_LIR(data_lir);
  }
  data_lir = method_literal_list_;
  while (data_lir != nullptr) {
    uint32_t target_method_idx = data_lir->operands[0];
    const DexFile* target_dex_file =
        reinterpret_cast<const DexFile*>(UnwrapPointer(data_lir->operands[1]));
    patches_.push_back(LinkerPatch::MethodPatch(code_buffer_.size(),
                                                target_dex_file, target_method_idx));
    PushUnpatchedReference(code_buffer_);
    data_lir = NEXT_LIR(data_lir);
  }
  // Push class literals.
  data_lir = class_literal_list_;
  while (data_lir != nullptr) {
    uint32_t target_type_idx = data_lir->operands[0];
    const DexFile* class_dex_file =
        reinterpret_cast<const DexFile*>(UnwrapPointer(data_lir->operands[1]));
    patches_.push_back(LinkerPatch::TypePatch(code_buffer_.size(),
                                              class_dex_file, target_type_idx));
    PushUnpatchedReference(code_buffer_);
    data_lir = NEXT_LIR(data_lir);
  }
}

/* Write the switch tables to the output stream */
void Mir2Lir::InstallSwitchTables() {
  for (Mir2Lir::SwitchTable* tab_rec : switch_tables_) {
    AlignBuffer(code_buffer_, tab_rec->offset);
    /*
     * For Arm, our reference point is the address of the bx
     * instruction that does the launch, so we have to subtract
     * the auto pc-advance.  For other targets the reference point
     * is a label, so we can use the offset as-is.
     */
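    // On Thumb2 the PC is read as the branch address + 4, hence the "+ 4" applied to
    // the anchor offset below.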
    int bx_offset = INVALID_OFFSET;
    switch (cu_->instruction_set) {
      case kThumb2:
        DCHECK(tab_rec->anchor->flags.fixup != kFixupNone);
        bx_offset = tab_rec->anchor->offset + 4;
        break;
      case kX86:
      case kX86_64:
        bx_offset = 0;
        break;
      case kArm64:
      case kMips:
        bx_offset = tab_rec->anchor->offset;
        break;
      default: LOG(FATAL) << "Unexpected instruction set: " << cu_->instruction_set;
    }
    if (cu_->verbose) {
      LOG(INFO) << "Switch table for offset 0x" << std::hex << bx_offset;
    }
    if (tab_rec->table[0] == Instruction::kSparseSwitchSignature) {
      const int32_t* keys = reinterpret_cast<const int32_t*>(&(tab_rec->table[2]));
      for (int elems = 0; elems < tab_rec->table[1]; elems++) {
        int disp = tab_rec->targets[elems]->offset - bx_offset;
        if (cu_->verbose) {
          LOG(INFO) << "  Case[" << elems << "] key: 0x"
                    << std::hex << keys[elems] << ", disp: 0x"
                    << std::hex << disp;
        }
        Push32(code_buffer_, keys[elems]);
        Push32(code_buffer_,
               tab_rec->targets[elems]->offset - bx_offset);
      }
    } else {
      DCHECK_EQ(static_cast<int>(tab_rec->table[0]),
                static_cast<int>(Instruction::kPackedSwitchSignature));
      for (int elems = 0; elems < tab_rec->table[1]; elems++) {
        int disp = tab_rec->targets[elems]->offset - bx_offset;
        if (cu_->verbose) {
          LOG(INFO) << "  Case[" << elems << "] disp: 0x"
                    << std::hex << disp;
        }
        Push32(code_buffer_, tab_rec->targets[elems]->offset - bx_offset);
      }
    }
  }
}

/* Write the fill array data to the output stream */
void Mir2Lir::InstallFillArrayData() {
  for (Mir2Lir::FillArrayData* tab_rec : fill_array_data_) {
    AlignBuffer(code_buffer_, tab_rec->offset);
    for (int i = 0; i < (tab_rec->size + 1) / 2; i++) {
      code_buffer_.push_back(tab_rec->table[i] & 0xFF);
      code_buffer_.push_back((tab_rec->table[i] >> 8) & 0xFF);
    }
  }
}

static int AssignLiteralOffsetCommon(LIR* lir, CodeOffset offset) {
  for (; lir != nullptr; lir = lir->next) {
    lir->offset = offset;
    offset += 4;
  }
  return offset;
}

static int AssignLiteralPointerOffsetCommon(LIR* lir, CodeOffset offset,
                                            unsigned int element_size) {
  // Align to natural pointer size.
  offset = RoundUp(offset, element_size);
  for (; lir != nullptr; lir = lir->next) {
    lir->offset = offset;
    offset += element_size;
  }
  return offset;
}

// Make sure we have a code address for every declared catch entry
bool Mir2Lir::VerifyCatchEntries() {
  MappingTable table(&encoded_mapping_table_[0]);
  std::vector<uint32_t> dex_pcs;
  dex_pcs.reserve(table.DexToPcSize());
  for (auto it = table.DexToPcBegin(), end = table.DexToPcEnd(); it != end; ++it) {
    dex_pcs.push_back(it.DexPc());
  }
  // Sort dex_pcs, so that we can quickly check it against the ordered mir_graph_->catches_.
  std::sort(dex_pcs.begin(), dex_pcs.end());

  bool success = true;
  auto it = dex_pcs.begin(), end = dex_pcs.end();
  for (uint32_t dex_pc : mir_graph_->catches_) {
    while (it != end && *it < dex_pc) {
      LOG(INFO) << "Unexpected catch entry @ dex pc 0x" << std::hex << *it;
      ++it;
      success = false;
    }
    if (it == end || *it > dex_pc) {
      LOG(INFO) << "Missing native PC for catch entry @ 0x" << std::hex << dex_pc;
      success = false;
    } else {
      ++it;
    }
  }
  if (!success) {
    LOG(INFO) << "Bad dex2pcMapping table in " << PrettyMethod(cu_->method_idx, *cu_->dex_file);
    LOG(INFO) << "Entries @ decode: " << mir_graph_->catches_.size() << ", Entries in table: "
              << table.DexToPcSize();
  }
  return success;
}


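// Layout of the encoded mapping table built below (all values LEB128 encoded):
//   header : total_entries, pc2dex_entries
//   pc2dex : a (native pc delta, signed dex pc delta) pair per safepoint
//   dex2pc : a (native pc delta, signed dex pc delta) pair per exported PC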
void Mir2Lir::CreateMappingTables() {
  bool generate_src_map = cu_->compiler_driver->GetCompilerOptions().GetIncludeDebugSymbols();

  uint32_t pc2dex_data_size = 0u;
  uint32_t pc2dex_entries = 0u;
  uint32_t pc2dex_offset = 0u;
  uint32_t pc2dex_dalvik_offset = 0u;
  uint32_t pc2dex_src_entries = 0u;
  uint32_t dex2pc_data_size = 0u;
  uint32_t dex2pc_entries = 0u;
  uint32_t dex2pc_offset = 0u;
  uint32_t dex2pc_dalvik_offset = 0u;
  for (LIR* tgt_lir = first_lir_insn_; tgt_lir != nullptr; tgt_lir = NEXT_LIR(tgt_lir)) {
    pc2dex_src_entries++;
    if (!tgt_lir->flags.is_nop && (tgt_lir->opcode == kPseudoSafepointPC)) {
      pc2dex_entries += 1;
      DCHECK(pc2dex_offset <= tgt_lir->offset);
      pc2dex_data_size += UnsignedLeb128Size(tgt_lir->offset - pc2dex_offset);
      pc2dex_data_size += SignedLeb128Size(static_cast<int32_t>(tgt_lir->dalvik_offset) -
                                           static_cast<int32_t>(pc2dex_dalvik_offset));
      pc2dex_offset = tgt_lir->offset;
      pc2dex_dalvik_offset = tgt_lir->dalvik_offset;
    }
    if (!tgt_lir->flags.is_nop && (tgt_lir->opcode == kPseudoExportedPC)) {
      dex2pc_entries += 1;
      DCHECK(dex2pc_offset <= tgt_lir->offset);
      dex2pc_data_size += UnsignedLeb128Size(tgt_lir->offset - dex2pc_offset);
      dex2pc_data_size += SignedLeb128Size(static_cast<int32_t>(tgt_lir->dalvik_offset) -
                                           static_cast<int32_t>(dex2pc_dalvik_offset));
      dex2pc_offset = tgt_lir->offset;
      dex2pc_dalvik_offset = tgt_lir->dalvik_offset;
    }
  }

  if (generate_src_map) {
    src_mapping_table_.reserve(pc2dex_src_entries);
  }

  uint32_t total_entries = pc2dex_entries + dex2pc_entries;
  uint32_t hdr_data_size = UnsignedLeb128Size(total_entries) + UnsignedLeb128Size(pc2dex_entries);
  uint32_t data_size = hdr_data_size + pc2dex_data_size + dex2pc_data_size;
  encoded_mapping_table_.resize(data_size);
  uint8_t* write_pos = &encoded_mapping_table_[0];
  write_pos = EncodeUnsignedLeb128(write_pos, total_entries);
  write_pos = EncodeUnsignedLeb128(write_pos, pc2dex_entries);
  DCHECK_EQ(static_cast<size_t>(write_pos - &encoded_mapping_table_[0]), hdr_data_size);
  uint8_t* write_pos2 = write_pos + pc2dex_data_size;

  pc2dex_offset = 0u;
  pc2dex_dalvik_offset = 0u;
  dex2pc_offset = 0u;
  dex2pc_dalvik_offset = 0u;
  for (LIR* tgt_lir = first_lir_insn_; tgt_lir != nullptr; tgt_lir = NEXT_LIR(tgt_lir)) {
    if (generate_src_map && !tgt_lir->flags.is_nop) {
      src_mapping_table_.push_back(SrcMapElem({tgt_lir->offset,
              static_cast<int32_t>(tgt_lir->dalvik_offset)}));
    }
    if (!tgt_lir->flags.is_nop && (tgt_lir->opcode == kPseudoSafepointPC)) {
      DCHECK(pc2dex_offset <= tgt_lir->offset);
      write_pos = EncodeUnsignedLeb128(write_pos, tgt_lir->offset - pc2dex_offset);
      write_pos = EncodeSignedLeb128(write_pos, static_cast<int32_t>(tgt_lir->dalvik_offset) -
                                     static_cast<int32_t>(pc2dex_dalvik_offset));
      pc2dex_offset = tgt_lir->offset;
      pc2dex_dalvik_offset = tgt_lir->dalvik_offset;
    }
    if (!tgt_lir->flags.is_nop && (tgt_lir->opcode == kPseudoExportedPC)) {
      DCHECK(dex2pc_offset <= tgt_lir->offset);
      write_pos2 = EncodeUnsignedLeb128(write_pos2, tgt_lir->offset - dex2pc_offset);
      write_pos2 = EncodeSignedLeb128(write_pos2, static_cast<int32_t>(tgt_lir->dalvik_offset) -
                                      static_cast<int32_t>(dex2pc_dalvik_offset));
      dex2pc_offset = tgt_lir->offset;
      dex2pc_dalvik_offset = tgt_lir->dalvik_offset;
    }
  }
  DCHECK_EQ(static_cast<size_t>(write_pos - &encoded_mapping_table_[0]),
            hdr_data_size + pc2dex_data_size);
  DCHECK_EQ(static_cast<size_t>(write_pos2 - &encoded_mapping_table_[0]), data_size);

  if (kIsDebugBuild) {
    CHECK(VerifyCatchEntries());

    // Verify the encoded table holds the expected data.
    MappingTable table(&encoded_mapping_table_[0]);
    CHECK_EQ(table.TotalSize(), total_entries);
    CHECK_EQ(table.PcToDexSize(), pc2dex_entries);
    auto it = table.PcToDexBegin();
    auto it2 = table.DexToPcBegin();
    for (LIR* tgt_lir = first_lir_insn_; tgt_lir != nullptr; tgt_lir = NEXT_LIR(tgt_lir)) {
      if (!tgt_lir->flags.is_nop && (tgt_lir->opcode == kPseudoSafepointPC)) {
        CHECK_EQ(tgt_lir->offset, it.NativePcOffset());
        CHECK_EQ(tgt_lir->dalvik_offset, it.DexPc());
        ++it;
      }
      if (!tgt_lir->flags.is_nop && (tgt_lir->opcode == kPseudoExportedPC)) {
        CHECK_EQ(tgt_lir->offset, it2.NativePcOffset());
        CHECK_EQ(tgt_lir->dalvik_offset, it2.DexPc());
        ++it2;
      }
    }
    CHECK(it == table.PcToDexEnd());
    CHECK(it2 == table.DexToPcEnd());
  }
}

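// Pair each safepoint's native PC (taken from the pc2dex mapping table) with the reference
// bitmap the verifier recorded for the corresponding dex pc.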
void Mir2Lir::CreateNativeGcMap() {
  DCHECK(!encoded_mapping_table_.empty());
  MappingTable mapping_table(&encoded_mapping_table_[0]);
  uint32_t max_native_offset = 0;
  for (auto it = mapping_table.PcToDexBegin(), end = mapping_table.PcToDexEnd(); it != end; ++it) {
    uint32_t native_offset = it.NativePcOffset();
    if (native_offset > max_native_offset) {
      max_native_offset = native_offset;
    }
  }
  MethodReference method_ref(cu_->dex_file, cu_->method_idx);
  const std::vector<uint8_t>& gc_map_raw =
      mir_graph_->GetCurrentDexCompilationUnit()->GetVerifiedMethod()->GetDexGcMap();
  verifier::DexPcToReferenceMap dex_gc_map(&(gc_map_raw)[0]);
  DCHECK_EQ(gc_map_raw.size(), dex_gc_map.RawSize());
  // Compute native offset to references size.
  GcMapBuilder native_gc_map_builder(&native_gc_map_,
                                     mapping_table.PcToDexSize(),
                                     max_native_offset, dex_gc_map.RegWidth());

  for (auto it = mapping_table.PcToDexBegin(), end = mapping_table.PcToDexEnd(); it != end; ++it) {
    uint32_t native_offset = it.NativePcOffset();
    uint32_t dex_pc = it.DexPc();
    const uint8_t* references = dex_gc_map.FindBitMap(dex_pc, false);
    CHECK(references != nullptr) << "Missing ref for dex pc 0x" << std::hex << dex_pc <<
        ": " << PrettyMethod(cu_->method_idx, *cu_->dex_file);
    native_gc_map_builder.AddEntry(native_offset, references);
  }
}

/* Determine the offset of each literal field */
int Mir2Lir::AssignLiteralOffset(CodeOffset offset) {
  offset = AssignLiteralOffsetCommon(literal_list_, offset);
  constexpr unsigned int ptr_size = sizeof(uint32_t);
  static_assert(ptr_size >= sizeof(mirror::HeapReference<mirror::Object>),
                "Pointer size cannot hold a heap reference");
  offset = AssignLiteralPointerOffsetCommon(code_literal_list_, offset, ptr_size);
  offset = AssignLiteralPointerOffsetCommon(method_literal_list_, offset, ptr_size);
  offset = AssignLiteralPointerOffsetCommon(class_literal_list_, offset, ptr_size);
  return offset;
}

int Mir2Lir::AssignSwitchTablesOffset(CodeOffset offset) {
  for (Mir2Lir::SwitchTable* tab_rec : switch_tables_) {
    tab_rec->offset = offset;
    if (tab_rec->table[0] == Instruction::kSparseSwitchSignature) {
      offset += tab_rec->table[1] * (sizeof(int) * 2);
    } else {
      DCHECK_EQ(static_cast<int>(tab_rec->table[0]),
                static_cast<int>(Instruction::kPackedSwitchSignature));
      offset += tab_rec->table[1] * sizeof(int);
    }
  }
  return offset;
}

int Mir2Lir::AssignFillArrayDataOffset(CodeOffset offset) {
  for (Mir2Lir::FillArrayData* tab_rec : fill_array_data_) {
    tab_rec->offset = offset;
    offset += tab_rec->size;
    // word align
    offset = RoundUp(offset, 4);
  }
  return offset;
}

/*
 * Insert a kPseudoCaseLabel at the beginning of the Dalvik
 * offset vaddr if pretty-printing, otherwise use the standard block
 * label.  The selected label will be used to fix up the case
 * branch table during the assembly phase.  All resource flags
 * are set to prevent code motion.  KeyVal is just there for debugging.
 */
LIR* Mir2Lir::InsertCaseLabel(DexOffset vaddr, int keyVal) {
  LIR* boundary_lir = &block_label_list_[mir_graph_->FindBlock(vaddr)->id];
  LIR* res = boundary_lir;
  if (cu_->verbose) {
    // Only pay the expense if we're pretty-printing.
    LIR* new_label = static_cast<LIR*>(arena_->Alloc(sizeof(LIR), kArenaAllocLIR));
    new_label->dalvik_offset = vaddr;
    new_label->opcode = kPseudoCaseLabel;
    new_label->operands[0] = keyVal;
    new_label->flags.fixup = kFixupLabel;
    DCHECK(!new_label->flags.use_def_invalid);
    new_label->u.m.def_mask = &kEncodeAll;
    InsertLIRAfter(boundary_lir, new_label);
    res = new_label;
  }
  return res;
}

void Mir2Lir::MarkPackedCaseLabels(Mir2Lir::SwitchTable* tab_rec) {
  const uint16_t* table = tab_rec->table;
  DexOffset base_vaddr = tab_rec->vaddr;
  const int32_t *targets = reinterpret_cast<const int32_t*>(&table[4]);
  int entries = table[1];
  int low_key = s4FromSwitchData(&table[2]);
  for (int i = 0; i < entries; i++) {
    tab_rec->targets[i] = InsertCaseLabel(base_vaddr + targets[i], i + low_key);
  }
}

void Mir2Lir::MarkSparseCaseLabels(Mir2Lir::SwitchTable* tab_rec) {
  const uint16_t* table = tab_rec->table;
  DexOffset base_vaddr = tab_rec->vaddr;
  int entries = table[1];
  const int32_t* keys = reinterpret_cast<const int32_t*>(&table[2]);
  const int32_t* targets = &keys[entries];
  for (int i = 0; i < entries; i++) {
    tab_rec->targets[i] = InsertCaseLabel(base_vaddr + targets[i], keys[i]);
  }
}

void Mir2Lir::ProcessSwitchTables() {
  for (Mir2Lir::SwitchTable* tab_rec : switch_tables_) {
    if (tab_rec->table[0] == Instruction::kPackedSwitchSignature) {
      MarkPackedCaseLabels(tab_rec);
    } else if (tab_rec->table[0] == Instruction::kSparseSwitchSignature) {
      MarkSparseCaseLabels(tab_rec);
    } else {
      LOG(FATAL) << "Invalid switch table";
    }
  }
}

void Mir2Lir::DumpSparseSwitchTable(const uint16_t* table) {
  /*
   * Sparse switch data format:
   *  ushort ident = 0x0200   magic value
   *  ushort size             number of entries in the table; > 0
   *  int keys[size]          keys, sorted low-to-high; 32-bit aligned
   *  int targets[size]       branch targets, relative to switch opcode
   *
   * Total size is (2+size*4) 16-bit code units.
   */
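  // Example: with size == 2 the table occupies 2 + 2*4 = 10 code units:
  //   0x0200, 0x0002, key[0], key[1], target[0], target[1] (keys and targets are 32-bit).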
  uint16_t ident = table[0];
  int entries = table[1];
  const int32_t* keys = reinterpret_cast<const int32_t*>(&table[2]);
  const int32_t* targets = &keys[entries];
  LOG(INFO) << "Sparse switch table - ident:0x" << std::hex << ident
            << ", entries: " << std::dec << entries;
  for (int i = 0; i < entries; i++) {
    LOG(INFO) << "  Key[" << keys[i] << "] -> 0x" << std::hex << targets[i];
  }
}

void Mir2Lir::DumpPackedSwitchTable(const uint16_t* table) {
  /*
   * Packed switch data format:
   *  ushort ident = 0x0100   magic value
   *  ushort size             number of entries in the table
   *  int first_key           first (and lowest) switch case value
   *  int targets[size]       branch targets, relative to switch opcode
   *
   * Total size is (4+size*2) 16-bit code units.
   */
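  // Example: with size == 3 and first_key == -1 the table covers cases -1, 0 and 1 in
  //   4 + 3*2 = 10 code units: 0x0100, 0x0003, first_key, target[0], target[1], target[2].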
  uint16_t ident = table[0];
  const int32_t* targets = reinterpret_cast<const int32_t*>(&table[4]);
  int entries = table[1];
  int low_key = s4FromSwitchData(&table[2]);
  LOG(INFO) << "Packed switch table - ident:0x" << std::hex << ident
            << ", entries: " << std::dec << entries << ", low_key: " << low_key;
  for (int i = 0; i < entries; i++) {
    LOG(INFO) << "  Key[" << (i + low_key) << "] -> 0x" << std::hex
              << targets[i];
  }
}

/* Set up special LIR to mark a Dalvik byte-code instruction start for pretty printing */
void Mir2Lir::MarkBoundary(DexOffset offset, const char* inst_str) {
  UNUSED(offset);
  // NOTE: only used for debug listings.
  NewLIR1(kPseudoDalvikByteCodeBoundary, WrapPointer(ArenaStrdup(inst_str)));
}

bool Mir2Lir::EvaluateBranch(Instruction::Code opcode, int32_t src1, int32_t src2) {
  bool is_taken;
  switch (opcode) {
    case Instruction::IF_EQ: is_taken = (src1 == src2); break;
    case Instruction::IF_NE: is_taken = (src1 != src2); break;
    case Instruction::IF_LT: is_taken = (src1 < src2); break;
    case Instruction::IF_GE: is_taken = (src1 >= src2); break;
    case Instruction::IF_GT: is_taken = (src1 > src2); break;
    case Instruction::IF_LE: is_taken = (src1 <= src2); break;
    case Instruction::IF_EQZ: is_taken = (src1 == 0); break;
    case Instruction::IF_NEZ: is_taken = (src1 != 0); break;
    case Instruction::IF_LTZ: is_taken = (src1 < 0); break;
    case Instruction::IF_GEZ: is_taken = (src1 >= 0); break;
    case Instruction::IF_GTZ: is_taken = (src1 > 0); break;
    case Instruction::IF_LEZ: is_taken = (src1 <= 0); break;
    default:
      LOG(FATAL) << "Unexpected opcode " << opcode;
      UNREACHABLE();
  }
  return is_taken;
}

// Convert relation of src1/src2 to src2/src1
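// The outcome is preserved when the operands are swapped (x < y becomes y > x); contrast
// NegateComparison() below, which returns the logical complement of the condition.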
ConditionCode Mir2Lir::FlipComparisonOrder(ConditionCode before) {
  ConditionCode res;
  switch (before) {
    case kCondEq: res = kCondEq; break;
    case kCondNe: res = kCondNe; break;
    case kCondLt: res = kCondGt; break;
    case kCondGt: res = kCondLt; break;
    case kCondLe: res = kCondGe; break;
    case kCondGe: res = kCondLe; break;
    default:
      LOG(FATAL) << "Unexpected ccode " << before;
      UNREACHABLE();
  }
  return res;
}

ConditionCode Mir2Lir::NegateComparison(ConditionCode before) {
  ConditionCode res;
  switch (before) {
    case kCondEq: res = kCondNe; break;
    case kCondNe: res = kCondEq; break;
    case kCondLt: res = kCondGe; break;
    case kCondGt: res = kCondLe; break;
    case kCondLe: res = kCondGt; break;
    case kCondGe: res = kCondLt; break;
    default:
      LOG(FATAL) << "Unexpected ccode " << before;
      UNREACHABLE();
  }
  return res;
}

// TODO: move to mir_to_lir.cc
Mir2Lir::Mir2Lir(CompilationUnit* cu, MIRGraph* mir_graph, ArenaAllocator* arena)
    : Backend(arena),
      literal_list_(nullptr),
      method_literal_list_(nullptr),
      class_literal_list_(nullptr),
      code_literal_list_(nullptr),
      first_fixup_(nullptr),
      cu_(cu),
      mir_graph_(mir_graph),
      switch_tables_(arena->Adapter(kArenaAllocSwitchTable)),
      fill_array_data_(arena->Adapter(kArenaAllocFillArrayData)),
      tempreg_info_(arena->Adapter()),
      reginfo_map_(arena->Adapter()),
      pointer_storage_(arena->Adapter()),
      data_offset_(0),
      total_size_(0),
      block_label_list_(nullptr),
      promotion_map_(nullptr),
      current_dalvik_offset_(0),
      estimated_native_code_size_(0),
      reg_pool_(nullptr),
      live_sreg_(0),
      core_vmap_table_(mir_graph->GetArena()->Adapter()),
      fp_vmap_table_(mir_graph->GetArena()->Adapter()),
      patches_(mir_graph->GetArena()->Adapter()),
      num_core_spills_(0),
      num_fp_spills_(0),
      frame_size_(0),
      core_spill_mask_(0),
      fp_spill_mask_(0),
      first_lir_insn_(nullptr),
      last_lir_insn_(nullptr),
      slow_paths_(arena->Adapter(kArenaAllocSlowPaths)),
      mem_ref_type_(ResourceMask::kHeapRef),
      mask_cache_(arena) {
  switch_tables_.reserve(4);
  fill_array_data_.reserve(4);
  tempreg_info_.reserve(20);
  reginfo_map_.reserve(RegStorage::kMaxRegs);
  pointer_storage_.reserve(128);
  slow_paths_.reserve(32);
  // Reserve pointer id 0 for nullptr.
  size_t null_idx = WrapPointer(nullptr);
  DCHECK_EQ(null_idx, 0U);
}

void Mir2Lir::Materialize() {
  cu_->NewTimingSplit("RegisterAllocation");
  CompilerInitializeRegAlloc();  // Needs to happen after SSA naming

  /* Allocate Registers using simple local allocation scheme */
  SimpleRegAlloc();

  /* First try the custom light codegen for special cases. */
  DCHECK(cu_->compiler_driver->GetMethodInlinerMap() != nullptr);
  bool special_worked = cu_->compiler_driver->GetMethodInlinerMap()->GetMethodInliner(cu_->dex_file)
      ->GenSpecial(this, cu_->method_idx);

  /* Take normal path for converting MIR to LIR only if the special codegen did not succeed. */
  if (special_worked == false) {
    MethodMIR2LIR();
  }

  /* Method is not empty */
  if (first_lir_insn_) {
    // mark the targets of switch statement case labels
    ProcessSwitchTables();

    /* Convert LIR into machine code. */
    AssembleLIR();

    if ((cu_->enable_debug & (1 << kDebugCodegenDump)) != 0) {
      CodegenDump();
    }
  }
}

CompiledMethod* Mir2Lir::GetCompiledMethod() {
  // Combine vmap tables - core regs, then fp regs - into vmap_table.
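  // Encoding (all ULEB128): entry count, core entries, an fp marker, then fp entries,
  // with each register entry biased by VmapTable::kEntryAdjustment.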
1047 Leb128EncodingVector vmap_encoder;
Brian Carlstrom7940e442013-07-12 13:46:57 -07001048 if (frame_size_ > 0) {
Vladimir Marko2e589aa2014-02-25 17:53:53 +00001049 // Prefix the encoded data with its size.
1050 size_t size = core_vmap_table_.size() + 1 /* marker */ + fp_vmap_table_.size();
1051 vmap_encoder.Reserve(size + 1u); // All values are likely to be one byte in ULEB128 (<128).
1052 vmap_encoder.PushBackUnsigned(size);
1053 // Core regs may have been inserted out of order - sort first.
1054 std::sort(core_vmap_table_.begin(), core_vmap_table_.end());
1055 for (size_t i = 0 ; i < core_vmap_table_.size(); ++i) {
1056 // Copy, stripping out the phys register sort key.
1057 vmap_encoder.PushBackUnsigned(
1058 ~(-1 << VREG_NUM_WIDTH) & (core_vmap_table_[i] + VmapTable::kEntryAdjustment));
1059 }
1060 // Push a marker to take place of lr.
1061 vmap_encoder.PushBackUnsigned(VmapTable::kAdjustedFpMarker);
Serguei Katkovc3801912014-07-08 17:21:53 +07001062 if (cu_->instruction_set == kThumb2) {
1063 // fp regs already sorted.
1064 for (uint32_t i = 0; i < fp_vmap_table_.size(); i++) {
1065 vmap_encoder.PushBackUnsigned(fp_vmap_table_[i] + VmapTable::kEntryAdjustment);
1066 }
1067 } else {
1068 // For other platforms regs may have been inserted out of order - sort first.
1069 std::sort(fp_vmap_table_.begin(), fp_vmap_table_.end());
1070 for (size_t i = 0 ; i < fp_vmap_table_.size(); ++i) {
1071 // Copy, stripping out the phys register sort key.
1072 vmap_encoder.PushBackUnsigned(
1073 ~(-1 << VREG_NUM_WIDTH) & (fp_vmap_table_[i] + VmapTable::kEntryAdjustment));
1074 }
Vladimir Marko2e589aa2014-02-25 17:53:53 +00001075 }
Brian Carlstrom7940e442013-07-12 13:46:57 -07001076 } else {
Vladimir Marko81949632014-05-02 11:53:22 +01001077 DCHECK_EQ(POPCOUNT(core_spill_mask_), 0);
1078 DCHECK_EQ(POPCOUNT(fp_spill_mask_), 0);
Vladimir Marko2e589aa2014-02-25 17:53:53 +00001079 DCHECK_EQ(core_vmap_table_.size(), 0u);
1080 DCHECK_EQ(fp_vmap_table_.size(), 0u);
1081 vmap_encoder.PushBackUnsigned(0u); // Size is 0.
Brian Carlstrom7940e442013-07-12 13:46:57 -07001082 }
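  // A sketch of the resulting encoding, with illustrative contents (a reading aid, not
  // part of the emitted data):
  //
  //   ULEB128: [size][core vreg]...[core vreg][kAdjustedFpMarker][fp vreg]...[fp vreg]
  //
  // where size == core_vmap_table_.size() + 1 (marker) + fp_vmap_table_.size(), every
  // entry is biased by VmapTable::kEntryAdjustment, and a method with an empty frame
  // encodes just a single 0 for the size.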
Mark Mendellae9fd932014-02-10 16:14:35 -08001083
Vladimir Markof4da6752014-08-01 19:04:18 +01001084 // Sort patches by literal offset for better deduplication.
1085 std::sort(patches_.begin(), patches_.end(), [](const LinkerPatch& lhs, const LinkerPatch& rhs) {
1086 return lhs.LiteralOffset() < rhs.LiteralOffset();
1087 });
1088
Tong Shen547cdfd2014-08-05 01:54:19 -07001089 std::unique_ptr<std::vector<uint8_t>> cfi_info(ReturnFrameDescriptionEntry());
Brian Carlstrom7940e442013-07-12 13:46:57 -07001090 CompiledMethod* result =
Ian Rogers72d32622014-05-06 16:20:11 -07001091 new CompiledMethod(cu_->compiler_driver, cu_->instruction_set, code_buffer_, frame_size_,
Yevgeny Roubane3ea8382014-08-08 16:29:38 +07001092 core_spill_mask_, fp_spill_mask_, &src_mapping_table_, encoded_mapping_table_,
Vladimir Markof4da6752014-08-01 19:04:18 +01001093 vmap_encoder.GetData(), native_gc_map_, cfi_info.get(),
1094 ArrayRef<LinkerPatch>(patches_));
Brian Carlstrom7940e442013-07-12 13:46:57 -07001095 return result;
1096}
1097
Razvan A Lupusoruda7a69b2014-01-08 15:09:50 -08001098size_t Mir2Lir::GetMaxPossibleCompilerTemps() const {
1099 // Choose a reasonably small value in order to contain stack growth.
1100 // Backends that are smarter about the spill region can return larger values.
1101 const size_t max_compiler_temps = 10;
1102 return max_compiler_temps;
1103}
1104
1105size_t Mir2Lir::GetNumBytesForCompilerTempSpillRegion() {
1106 // By default assume that the Mir2Lir will need one slot for each temporary.
1107 // If the backend can better determine temps that have non-overlapping ranges and
1108 // temps that do not need to be spilled, it can actually provide a smaller region.
Razvan A Lupusoru8d0d03e2014-06-06 17:04:52 -07001109 mir_graph_->CommitCompilerTemps();
1110 return mir_graph_->GetNumBytesForSpecialTemps() + mir_graph_->GetMaximumBytesForNonSpecialTemps();
Razvan A Lupusoruda7a69b2014-01-08 15:09:50 -08001111}
1112
Brian Carlstrom7940e442013-07-12 13:46:57 -07001113int Mir2Lir::ComputeFrameSize() {
1114 /* Figure out the frame size */
Dmitry Petrochenkof29a4242014-05-05 20:28:47 +07001115 uint32_t size = num_core_spills_ * GetBytesPerGprSpillLocation(cu_->instruction_set)
1116 + num_fp_spills_ * GetBytesPerFprSpillLocation(cu_->instruction_set)
1117 + sizeof(uint32_t) // Filler.
Razvan A Lupusoru8d0d03e2014-06-06 17:04:52 -07001118 + mir_graph_->GetNumOfLocalCodeVRs() * sizeof(uint32_t)
1119 + mir_graph_->GetNumOfOutVRs() * sizeof(uint32_t)
Dmitry Petrochenkof29a4242014-05-05 20:28:47 +07001120 + GetNumBytesForCompilerTempSpillRegion();
Brian Carlstrom7940e442013-07-12 13:46:57 -07001121 /* Align and set */
Andreas Gampe66018822014-05-05 20:47:19 -07001122 return RoundUp(size, kStackAlignment);
Brian Carlstrom7940e442013-07-12 13:46:57 -07001123}
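// A worked example with made-up numbers (not taken from any particular method): on a
// target with 4-byte GPR and FPR spill slots, 5 core spills (20) + 0 fp spills (0)
// + 4 filler + 6 local vregs (24) + 2 outs (8) + 0 compiler-temp bytes = 56 bytes,
// which RoundUp() pads to 64 assuming a 16-byte kStackAlignment.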
1124
1125/*
1126 * Append an LIR instruction to the LIR list maintained by a compilation
1127 * unit.
1128 */
Brian Carlstrom2ce745c2013-07-17 17:44:30 -07001129void Mir2Lir::AppendLIR(LIR* lir) {
Ian Rogers6a3c1fc2014-10-31 00:33:20 -07001130 if (first_lir_insn_ == nullptr) {
1131 DCHECK(last_lir_insn_ == nullptr);
Brian Carlstrom7940e442013-07-12 13:46:57 -07001132 last_lir_insn_ = first_lir_insn_ = lir;
Ian Rogers6a3c1fc2014-10-31 00:33:20 -07001133 lir->prev = lir->next = nullptr;
Brian Carlstrom7940e442013-07-12 13:46:57 -07001134 } else {
1135 last_lir_insn_->next = lir;
1136 lir->prev = last_lir_insn_;
Ian Rogers6a3c1fc2014-10-31 00:33:20 -07001137 lir->next = nullptr;
Brian Carlstrom7940e442013-07-12 13:46:57 -07001138 last_lir_insn_ = lir;
1139 }
1140}
1141
1142/*
1143 * Insert an LIR instruction before the current instruction, which cannot be the
1144 * first instruction.
1145 *
1146 * prev_lir <-> new_lir <-> current_lir
1147 */
Brian Carlstrom2ce745c2013-07-17 17:44:30 -07001148void Mir2Lir::InsertLIRBefore(LIR* current_lir, LIR* new_lir) {
Ian Rogers6a3c1fc2014-10-31 00:33:20 -07001149 DCHECK(current_lir->prev != nullptr);
Brian Carlstrom7940e442013-07-12 13:46:57 -07001150 LIR *prev_lir = current_lir->prev;
1151
1152 prev_lir->next = new_lir;
1153 new_lir->prev = prev_lir;
1154 new_lir->next = current_lir;
1155 current_lir->prev = new_lir;
1156}
1157
1158/*
1159 * Insert an LIR instruction after the current instruction, which cannot be the
Andreas Gampe3c12c512014-06-24 18:46:29 +00001160 * last instruction.
Brian Carlstrom7940e442013-07-12 13:46:57 -07001161 *
1162 * current_lir -> new_lir -> old_next
1163 */
Brian Carlstrom2ce745c2013-07-17 17:44:30 -07001164void Mir2Lir::InsertLIRAfter(LIR* current_lir, LIR* new_lir) {
Brian Carlstrom7940e442013-07-12 13:46:57 -07001165 new_lir->prev = current_lir;
1166 new_lir->next = current_lir->next;
1167 current_lir->next = new_lir;
1168 new_lir->next->prev = new_lir;
1169}
1170
Mark Mendell4708dcd2014-01-22 09:05:18 -08001171bool Mir2Lir::IsPowerOfTwo(uint64_t x) {
1172 return (x & (x - 1)) == 0;
1173}
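// For example, IsPowerOfTwo(8) is true (0b1000 & 0b0111 == 0) and IsPowerOfTwo(12) is
// false (0b1100 & 0b1011 == 0b1000). Note that the check also reports true for x == 0.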
1174
1175// Returns the index of the lowest set bit in 'x'.
1176int32_t Mir2Lir::LowestSetBit(uint64_t x) {
1177 int bit_posn = 0;
1178 while ((x & 0xf) == 0) {
1179 bit_posn += 4;
1180 x >>= 4;
1181 }
1182 while ((x & 1) == 0) {
1183 bit_posn++;
1184 x >>= 1;
1185 }
1186 return bit_posn;
1187}
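// For example, LowestSetBit(0x50) first skips the all-zero low nibble (bit_posn = 4,
// x = 0x5) and then stops at the set bit, returning 4. The loops never terminate for
// x == 0, so callers must pass a non-zero value.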
1188
Alexei Zavjalovd8c3e362014-10-08 15:51:59 +07001189bool Mir2Lir::PartiallyIntersects(RegLocation rl_src, RegLocation rl_dest) {
Mark Mendell4708dcd2014-01-22 09:05:18 -08001190 DCHECK(rl_src.wide);
1191 DCHECK(rl_dest.wide);
1192 return (abs(mir_graph_->SRegToVReg(rl_src.s_reg_low) - mir_graph_->SRegToVReg(rl_dest.s_reg_low)) == 1);
1193}
1194
Alexei Zavjalovd8c3e362014-10-08 15:51:59 +07001195bool Mir2Lir::Intersects(RegLocation rl_src, RegLocation rl_dest) {
1196 DCHECK(rl_src.wide);
1197 DCHECK(rl_dest.wide);
1198 return (abs(mir_graph_->SRegToVReg(rl_src.s_reg_low) - mir_graph_->SRegToVReg(rl_dest.s_reg_low)) <= 1);
1199}
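// Illustrative vreg numbers: a wide value occupies two adjacent virtual registers, so for
// rl_src in v4/v5 and rl_dest in v5/v6 the distance between the low vregs is 1 and both
// Intersects() and PartiallyIntersects() return true; for rl_src and rl_dest both in v4/v5
// the distance is 0, so only Intersects() returns true; for rl_dest in v6/v7 the distance
// is 2 and neither returns true.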
1200
buzbee2700f7e2014-03-07 09:46:20 -08001201LIR *Mir2Lir::OpCmpMemImmBranch(ConditionCode cond, RegStorage temp_reg, RegStorage base_reg,
Dave Allison69dfe512014-07-11 17:11:58 +00001202 int offset, int check_value, LIR* target, LIR** compare) {
Mark Mendell766e9292014-01-27 07:55:47 -08001203 // Handle this for architectures that can't compare to memory.
Dave Allison69dfe512014-07-11 17:11:58 +00001204 LIR* inst = Load32Disp(base_reg, offset, temp_reg);
1205 if (compare != nullptr) {
1206 *compare = inst;
1207 }
Mark Mendell766e9292014-01-27 07:55:47 -08001208 LIR* branch = OpCmpImmBranch(cond, temp_reg, check_value, target);
1209 return branch;
1210}
1211
Dave Allisonbcec6fb2014-01-17 12:52:22 -08001212void Mir2Lir::AddSlowPath(LIRSlowPath* slowpath) {
Vladimir Markoe39c54e2014-09-22 14:50:02 +01001213 slow_paths_.push_back(slowpath);
Serguei Katkov589e0462014-09-05 18:37:22 +07001214 ResetDefTracking();
Dave Allisonbcec6fb2014-01-17 12:52:22 -08001215}
Mark Mendell55d0eac2014-02-06 11:02:52 -08001216
Jeff Hao49161ce2014-03-12 11:05:25 -07001217void Mir2Lir::LoadCodeAddress(const MethodReference& target_method, InvokeType type,
1218 SpecialTargetRegister symbolic_reg) {
Vladimir Markoa51a0b02014-05-21 12:08:39 +01001219 LIR* data_target = ScanLiteralPoolMethod(code_literal_list_, target_method);
Ian Rogers6a3c1fc2014-10-31 00:33:20 -07001220 if (data_target == nullptr) {
Vladimir Markoa51a0b02014-05-21 12:08:39 +01001221 data_target = AddWordData(&code_literal_list_, target_method.dex_method_index);
Jeff Hao49161ce2014-03-12 11:05:25 -07001222 data_target->operands[1] = WrapPointer(const_cast<DexFile*>(target_method.dex_file));
Vladimir Markoa51a0b02014-05-21 12:08:39 +01001223 // NOTE: The invoke type doesn't contribute to the literal identity. In fact, we can have
1224 // the same method invoked with kVirtual, kSuper and kInterface but the class linker will
1225 // resolve these invokes to the same method, so we don't care which one we record here.
Jeff Hao49161ce2014-03-12 11:05:25 -07001226 data_target->operands[2] = type;
Mark Mendell55d0eac2014-02-06 11:02:52 -08001227 }
Chao-ying Fua77ee512014-07-01 17:43:41 -07001228 // Loads a code pointer. Code from the oat file can be mapped anywhere.
1229 LIR* load_pc_rel = OpPcRelLoad(TargetPtrReg(symbolic_reg), data_target);
Mark Mendell55d0eac2014-02-06 11:02:52 -08001230 AppendLIR(load_pc_rel);
1231 DCHECK_NE(cu_->instruction_set, kMips) << reinterpret_cast<void*>(data_target);
1232}
1233
Jeff Hao49161ce2014-03-12 11:05:25 -07001234void Mir2Lir::LoadMethodAddress(const MethodReference& target_method, InvokeType type,
1235 SpecialTargetRegister symbolic_reg) {
Vladimir Markoa51a0b02014-05-21 12:08:39 +01001236 LIR* data_target = ScanLiteralPoolMethod(method_literal_list_, target_method);
Ian Rogers6a3c1fc2014-10-31 00:33:20 -07001237 if (data_target == nullptr) {
Vladimir Markoa51a0b02014-05-21 12:08:39 +01001238 data_target = AddWordData(&method_literal_list_, target_method.dex_method_index);
Jeff Hao49161ce2014-03-12 11:05:25 -07001239 data_target->operands[1] = WrapPointer(const_cast<DexFile*>(target_method.dex_file));
Vladimir Markoa51a0b02014-05-21 12:08:39 +01001240 // NOTE: The invoke type doesn't contribute to the literal identity. In fact, we can have
1241 // the same method invoked with kVirtual, kSuper and kInterface but the class linker will
1242 // resolve these invokes to the same method, so we don't care which one we record here.
Jeff Hao49161ce2014-03-12 11:05:25 -07001243 data_target->operands[2] = type;
Mark Mendell55d0eac2014-02-06 11:02:52 -08001244 }
Chao-ying Fua77ee512014-07-01 17:43:41 -07001245 // Loads an ArtMethod pointer, which is a reference as it lives in the heap.
Andreas Gampeccc60262014-07-04 18:02:38 -07001246 LIR* load_pc_rel = OpPcRelLoad(TargetReg(symbolic_reg, kRef), data_target);
Mark Mendell55d0eac2014-02-06 11:02:52 -08001247 AppendLIR(load_pc_rel);
1248 DCHECK_NE(cu_->instruction_set, kMips) << reinterpret_cast<void*>(data_target);
1249}
1250
Fred Shihe7f82e22014-08-06 10:46:37 -07001251void Mir2Lir::LoadClassType(const DexFile& dex_file, uint32_t type_idx,
1252 SpecialTargetRegister symbolic_reg) {
Mark Mendell55d0eac2014-02-06 11:02:52 -08001253 // Use the literal pool and a PC-relative load from a data word.
Fred Shihe7f82e22014-08-06 10:46:37 -07001254 LIR* data_target = ScanLiteralPoolClass(class_literal_list_, dex_file, type_idx);
Mark Mendell55d0eac2014-02-06 11:02:52 -08001255 if (data_target == nullptr) {
1256 data_target = AddWordData(&class_literal_list_, type_idx);
Fred Shih4fc78532014-08-06 16:44:22 -07001257 data_target->operands[1] = WrapPointer(const_cast<DexFile*>(&dex_file));
Mark Mendell55d0eac2014-02-06 11:02:52 -08001258 }
Chao-ying Fua77ee512014-07-01 17:43:41 -07001259 // Loads a Class pointer, which is a reference as it lives in the heap.
Andreas Gampeccc60262014-07-04 18:02:38 -07001260 LIR* load_pc_rel = OpPcRelLoad(TargetReg(symbolic_reg, kRef), data_target);
Mark Mendell55d0eac2014-02-06 11:02:52 -08001261 AppendLIR(load_pc_rel);
1262}
1263
Tong Shen547cdfd2014-08-05 01:54:19 -07001264std::vector<uint8_t>* Mir2Lir::ReturnFrameDescriptionEntry() {
Mark Mendellae9fd932014-02-10 16:14:35 -08001265 // By default no frame description entry is provided.
1266 return nullptr;
1267}
1268
buzbee2700f7e2014-03-07 09:46:20 -08001269RegLocation Mir2Lir::NarrowRegLoc(RegLocation loc) {
buzbee091cc402014-03-31 10:14:40 -07001270 if (loc.location == kLocPhysReg) {
buzbee85089dd2014-05-25 15:10:52 -07001271 DCHECK(!loc.reg.Is32Bit());
buzbee091cc402014-03-31 10:14:40 -07001272 if (loc.reg.IsPair()) {
buzbee85089dd2014-05-25 15:10:52 -07001273 RegisterInfo* info_lo = GetRegInfo(loc.reg.GetLow());
1274 RegisterInfo* info_hi = GetRegInfo(loc.reg.GetHigh());
1275 info_lo->SetIsWide(false);
1276 info_hi->SetIsWide(false);
1277 loc.reg = info_lo->GetReg();
buzbee091cc402014-03-31 10:14:40 -07001278 } else {
buzbee85089dd2014-05-25 15:10:52 -07001279 RegisterInfo* info = GetRegInfo(loc.reg);
1280 RegisterInfo* info_new = info->FindMatchingView(RegisterInfo::k32SoloStorageMask);
1281 DCHECK(info_new != nullptr);
1282 if (info->IsLive() && (info->SReg() == loc.s_reg_low)) {
1283 info->MarkDead();
1284 info_new->MarkLive(loc.s_reg_low);
1285 }
1286 loc.reg = info_new->GetReg();
buzbee091cc402014-03-31 10:14:40 -07001287 }
buzbee85089dd2014-05-25 15:10:52 -07001288 DCHECK(loc.reg.Valid());
buzbee2700f7e2014-03-07 09:46:20 -08001289 }
buzbee85089dd2014-05-25 15:10:52 -07001290 loc.wide = false;
buzbee2700f7e2014-03-07 09:46:20 -08001291 return loc;
1292}
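// For instance (register names are illustrative, not tied to a specific backend): a wide
// value held in the pair (r2, r3) narrows to r2 with both halves marked non-wide, while a
// wide value in a single 64-bit register narrows to that register's 32-bit view, with
// liveness transferred if the old view was live for loc.s_reg_low.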
1293
Mark Mendelld65c51a2014-04-29 16:55:20 -04001294void Mir2Lir::GenMachineSpecificExtendedMethodMIR(BasicBlock* bb, MIR* mir) {
Ian Rogers6a3c1fc2014-10-31 00:33:20 -07001295 UNUSED(bb, mir);
Mark Mendelld65c51a2014-04-29 16:55:20 -04001296 LOG(FATAL) << "Unknown MIR opcode not supported on this architecture";
Ian Rogers6a3c1fc2014-10-31 00:33:20 -07001297 UNREACHABLE();
Mark Mendelld65c51a2014-04-29 16:55:20 -04001298}
1299
Brian Carlstrom7934ac22013-07-26 10:54:15 -07001300} // namespace art