/*
 * Copyright (C) 2011 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "dex/compiler_internals.h"
#include "dex_file-inl.h"
#include "gc_map.h"
#include "mapping_table.h"
#include "mir_to_lir-inl.h"
#include "dex/quick/dex_file_method_inliner.h"
#include "dex/quick/dex_file_to_method_inliner_map.h"
#include "dex/verified_methods_data.h"
#include "verifier/dex_gc_map.h"
#include "verifier/method_verifier.h"

namespace art {

namespace {

/* Dump a mapping table */
template <typename It>
void DumpMappingTable(const char* table_name, const char* descriptor, const char* name,
                      const Signature& signature, uint32_t size, It first) {
  if (size != 0) {
    std::string line(StringPrintf("\n %s %s%s_%s_table[%zu] = {", table_name,
                     descriptor, name, signature.ToString().c_str(), size));
    std::replace(line.begin(), line.end(), ';', '_');
    LOG(INFO) << line;
    for (uint32_t i = 0; i != size; ++i) {
      line = StringPrintf(" {0x%05x, 0x%04x},", first.NativePcOffset(), first.DexPc());
      ++first;
      LOG(INFO) << line;
    }
    LOG(INFO) << " };\n\n";
  }
}

}  // anonymous namespace

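/*
 * Report whether a constant source operand is cheap enough to materialize
 * inline instead of loading it from the literal pool, dispatching on the
 * operand's width and FP-ness to the target-specific InexpensiveConstant*
 * hooks.
 */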
bool Mir2Lir::IsInexpensiveConstant(RegLocation rl_src) {
  bool res = false;
  if (rl_src.is_const) {
    if (rl_src.wide) {
      if (rl_src.fp) {
        res = InexpensiveConstantDouble(mir_graph_->ConstantValueWide(rl_src));
      } else {
        res = InexpensiveConstantLong(mir_graph_->ConstantValueWide(rl_src));
      }
    } else {
      if (rl_src.fp) {
        res = InexpensiveConstantFloat(mir_graph_->ConstantValue(rl_src));
      } else {
        res = InexpensiveConstantInt(mir_graph_->ConstantValue(rl_src));
      }
    }
  }
  return res;
}

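/*
 * Mark a GC safepoint: give the instruction a full def mask so nothing is
 * moved across it, then emit a kPseudoSafepointPC right after it to record
 * the native PC that ends up in the PC-to-dex mapping table.
 */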
void Mir2Lir::MarkSafepointPC(LIR* inst) {
  DCHECK(!inst->flags.use_def_invalid);
  inst->u.m.def_mask = ENCODE_ALL;
  LIR* safepoint_pc = NewLIR0(kPseudoSafepointPC);
  DCHECK_EQ(safepoint_pc->u.m.def_mask, ENCODE_ALL);
}

bool Mir2Lir::FastInstance(uint32_t field_idx, bool is_put, int* field_offset, bool* is_volatile) {
  return cu_->compiler_driver->ComputeInstanceFieldInfo(
      field_idx, mir_graph_->GetCurrentDexCompilationUnit(), is_put, field_offset, is_volatile);
}

/* Remove a LIR from the list. */
void Mir2Lir::UnlinkLIR(LIR* lir) {
  if (UNLIKELY(lir == first_lir_insn_)) {
    first_lir_insn_ = lir->next;
    if (lir->next != NULL) {
      lir->next->prev = NULL;
    } else {
      DCHECK(lir->next == NULL);
      DCHECK(lir == last_lir_insn_);
      last_lir_insn_ = NULL;
    }
  } else if (lir == last_lir_insn_) {
    last_lir_insn_ = lir->prev;
    lir->prev->next = NULL;
  } else if ((lir->prev != NULL) && (lir->next != NULL)) {
    lir->prev->next = lir->next;
    lir->next->prev = lir->prev;
  }
}

/* Convert an instruction to a NOP */
void Mir2Lir::NopLIR(LIR* lir) {
  lir->flags.is_nop = true;
  if (!cu_->verbose) {
    UnlinkLIR(lir);
  }
}

void Mir2Lir::SetMemRefType(LIR* lir, bool is_load, int mem_type) {
  uint64_t *mask_ptr;
  uint64_t mask = ENCODE_MEM;
  DCHECK(GetTargetInstFlags(lir->opcode) & (IS_LOAD | IS_STORE));
  DCHECK(!lir->flags.use_def_invalid);
  if (is_load) {
    mask_ptr = &lir->u.m.use_mask;
  } else {
    mask_ptr = &lir->u.m.def_mask;
  }
  /* Clear out the memref flags */
  *mask_ptr &= ~mask;
  /* ..and then add back the one we need */
  switch (mem_type) {
    case kLiteral:
      DCHECK(is_load);
      *mask_ptr |= ENCODE_LITERAL;
      break;
    case kDalvikReg:
      *mask_ptr |= ENCODE_DALVIK_REG;
      break;
    case kHeapRef:
      *mask_ptr |= ENCODE_HEAP_REF;
      break;
    case kMustNotAlias:
      /* Currently only loads can be marked as kMustNotAlias */
      DCHECK(!(GetTargetInstFlags(lir->opcode) & IS_STORE));
      *mask_ptr |= ENCODE_MUST_NOT_ALIAS;
      break;
    default:
      LOG(FATAL) << "Oat: invalid memref kind - " << mem_type;
  }
}

/*
 * Mark load/store instructions that access Dalvik registers through the stack.
 */
void Mir2Lir::AnnotateDalvikRegAccess(LIR* lir, int reg_id, bool is_load,
                                      bool is64bit) {
  SetMemRefType(lir, is_load, kDalvikReg);

  /*
   * Store the Dalvik register id in alias_info. Mark the MSB if it is a 64-bit
   * access.
   */
  lir->flags.alias_info = ENCODE_ALIAS_INFO(reg_id, is64bit);
}

/*
 * Debugging macros
 */
#define DUMP_RESOURCE_MASK(X)

/* Pretty-print a LIR instruction */
void Mir2Lir::DumpLIRInsn(LIR* lir, unsigned char* base_addr) {
  int offset = lir->offset;
  int dest = lir->operands[0];
  const bool dump_nop = (cu_->enable_debug & (1 << kDebugShowNops));

  /* Handle pseudo-ops individually, and all regular insns as a group */
  switch (lir->opcode) {
    case kPseudoMethodEntry:
      LOG(INFO) << "-------- method entry "
                << PrettyMethod(cu_->method_idx, *cu_->dex_file);
      break;
    case kPseudoMethodExit:
      LOG(INFO) << "-------- Method_Exit";
      break;
    case kPseudoBarrier:
      LOG(INFO) << "-------- BARRIER";
      break;
    case kPseudoEntryBlock:
      LOG(INFO) << "-------- entry offset: 0x" << std::hex << dest;
      break;
    case kPseudoDalvikByteCodeBoundary:
      if (lir->operands[0] == 0) {
        // NOTE: only used for debug listings.
        lir->operands[0] = WrapPointer(ArenaStrdup("No instruction string"));
      }
      LOG(INFO) << "-------- dalvik offset: 0x" << std::hex
                << lir->dalvik_offset << " @ "
                << reinterpret_cast<char*>(UnwrapPointer(lir->operands[0]));
      break;
    case kPseudoExitBlock:
      LOG(INFO) << "-------- exit offset: 0x" << std::hex << dest;
      break;
    case kPseudoPseudoAlign4:
      LOG(INFO) << reinterpret_cast<uintptr_t>(base_addr) + offset << " (0x" << std::hex
                << offset << "): .align4";
      break;
    case kPseudoEHBlockLabel:
      LOG(INFO) << "Exception_Handling:";
      break;
    case kPseudoTargetLabel:
    case kPseudoNormalBlockLabel:
      LOG(INFO) << "L" << reinterpret_cast<void*>(lir) << ":";
      break;
    case kPseudoThrowTarget:
      LOG(INFO) << "LT" << reinterpret_cast<void*>(lir) << ":";
      break;
    case kPseudoIntrinsicRetry:
      LOG(INFO) << "IR" << reinterpret_cast<void*>(lir) << ":";
      break;
    case kPseudoSuspendTarget:
      LOG(INFO) << "LS" << reinterpret_cast<void*>(lir) << ":";
      break;
    case kPseudoSafepointPC:
      LOG(INFO) << "LsafepointPC_0x" << std::hex << lir->offset << "_" << lir->dalvik_offset << ":";
      break;
    case kPseudoExportedPC:
      LOG(INFO) << "LexportedPC_0x" << std::hex << lir->offset << "_" << lir->dalvik_offset << ":";
      break;
    case kPseudoCaseLabel:
      LOG(INFO) << "LC" << reinterpret_cast<void*>(lir) << ": Case target 0x"
                << std::hex << lir->operands[0] << "|" << std::dec
                << lir->operands[0];
      break;
    default:
      if (lir->flags.is_nop && !dump_nop) {
        break;
      } else {
        std::string op_name(BuildInsnString(GetTargetInstName(lir->opcode),
                                            lir, base_addr));
        std::string op_operands(BuildInsnString(GetTargetInstFmt(lir->opcode),
                                                lir, base_addr));
        LOG(INFO) << StringPrintf("%05x: %-9s%s%s",
                                  reinterpret_cast<unsigned int>(base_addr + offset),
                                  op_name.c_str(), op_operands.c_str(),
                                  lir->flags.is_nop ? "(nop)" : "");
      }
      break;
  }

  if (lir->u.m.use_mask && (!lir->flags.is_nop || dump_nop)) {
    DUMP_RESOURCE_MASK(DumpResourceMask(lir, lir->u.m.use_mask, "use"));
  }
  if (lir->u.m.def_mask && (!lir->flags.is_nop || dump_nop)) {
    DUMP_RESOURCE_MASK(DumpResourceMask(lir, lir->u.m.def_mask, "def"));
  }
}

void Mir2Lir::DumpPromotionMap() {
  int num_regs = cu_->num_dalvik_registers + cu_->num_compiler_temps + 1;
  for (int i = 0; i < num_regs; i++) {
    PromotionMap v_reg_map = promotion_map_[i];
    std::string buf;
    if (v_reg_map.fp_location == kLocPhysReg) {
      StringAppendF(&buf, " : s%d", v_reg_map.FpReg & FpRegMask());
    }

    std::string buf3;
    if (i < cu_->num_dalvik_registers) {
      StringAppendF(&buf3, "%02d", i);
    } else if (i == mir_graph_->GetMethodSReg()) {
      buf3 = "Method*";
    } else {
      StringAppendF(&buf3, "ct%d", i - cu_->num_dalvik_registers);
    }

    LOG(INFO) << StringPrintf("V[%s] -> %s%d%s", buf3.c_str(),
                              v_reg_map.core_location == kLocPhysReg ?
                              "r" : "SP+", v_reg_map.core_location == kLocPhysReg ?
                              v_reg_map.core_reg : SRegOffset(i),
                              buf.c_str());
  }
}

/* Dump instructions and constant pool contents */
void Mir2Lir::CodegenDump() {
  LOG(INFO) << "Dumping LIR insns for "
            << PrettyMethod(cu_->method_idx, *cu_->dex_file);
  LIR* lir_insn;
  int insns_size = cu_->code_item->insns_size_in_code_units_;

  LOG(INFO) << "Regs (excluding ins) : " << cu_->num_regs;
  LOG(INFO) << "Ins : " << cu_->num_ins;
  LOG(INFO) << "Outs : " << cu_->num_outs;
  LOG(INFO) << "CoreSpills : " << num_core_spills_;
  LOG(INFO) << "FPSpills : " << num_fp_spills_;
  LOG(INFO) << "CompilerTemps : " << cu_->num_compiler_temps;
  LOG(INFO) << "Frame size : " << frame_size_;
  LOG(INFO) << "code size is " << total_size_ <<
      " bytes, Dalvik size is " << insns_size * 2;
  LOG(INFO) << "expansion factor: "
            << static_cast<float>(total_size_) / static_cast<float>(insns_size * 2);
  DumpPromotionMap();
  for (lir_insn = first_lir_insn_; lir_insn != NULL; lir_insn = lir_insn->next) {
    DumpLIRInsn(lir_insn, 0);
  }
  for (lir_insn = literal_list_; lir_insn != NULL; lir_insn = lir_insn->next) {
    LOG(INFO) << StringPrintf("%x (%04x): .word (%#x)", lir_insn->offset, lir_insn->offset,
                              lir_insn->operands[0]);
  }

  const DexFile::MethodId& method_id =
      cu_->dex_file->GetMethodId(cu_->method_idx);
  const Signature signature = cu_->dex_file->GetMethodSignature(method_id);
  const char* name = cu_->dex_file->GetMethodName(method_id);
  const char* descriptor(cu_->dex_file->GetMethodDeclaringClassDescriptor(method_id));

  // Dump mapping tables
  if (!encoded_mapping_table_.empty()) {
    MappingTable table(&encoded_mapping_table_[0]);
    DumpMappingTable("PC2Dex_MappingTable", descriptor, name, signature,
                     table.PcToDexSize(), table.PcToDexBegin());
    DumpMappingTable("Dex2PC_MappingTable", descriptor, name, signature,
                     table.DexToPcSize(), table.DexToPcBegin());
  }
}

/*
 * Search the existing constants in the literal pool for an exact or close match
 * within specified delta (greater or equal to 0).
 */
LIR* Mir2Lir::ScanLiteralPool(LIR* data_target, int value, unsigned int delta) {
  while (data_target) {
    if ((static_cast<unsigned>(value - data_target->operands[0])) <= delta)
      return data_target;
    data_target = data_target->next;
  }
  return NULL;
}

/* Search the existing constants in the literal pool for an exact wide match */
LIR* Mir2Lir::ScanLiteralPoolWide(LIR* data_target, int val_lo, int val_hi) {
  bool lo_match = false;
  LIR* lo_target = NULL;
  while (data_target) {
    if (lo_match && (data_target->operands[0] == val_hi)) {
      // Record high word in case we need to expand this later.
      lo_target->operands[1] = val_hi;
      return lo_target;
    }
    lo_match = false;
    if (data_target->operands[0] == val_lo) {
      lo_match = true;
      lo_target = data_target;
    }
    data_target = data_target->next;
  }
  return NULL;
}

/*
 * The following are building blocks to insert constants into the pool or
 * instruction streams.
 */

/* Add a 32-bit constant to the constant pool */
LIR* Mir2Lir::AddWordData(LIR* *constant_list_p, int value) {
  /* Add the constant to the literal pool */
  if (constant_list_p) {
    LIR* new_value = static_cast<LIR*>(arena_->Alloc(sizeof(LIR), ArenaAllocator::kAllocData));
    new_value->operands[0] = value;
    new_value->next = *constant_list_p;
    *constant_list_p = new_value;
    estimated_native_code_size_ += sizeof(value);
    return new_value;
  }
  return NULL;
}

/* Add a 64-bit constant to the constant pool or mixed with code */
LIR* Mir2Lir::AddWideData(LIR* *constant_list_p, int val_lo, int val_hi) {
  AddWordData(constant_list_p, val_hi);
  return AddWordData(constant_list_p, val_lo);
}

static void PushWord(std::vector<uint8_t>&buf, int data) {
  buf.push_back(data & 0xff);
  buf.push_back((data >> 8) & 0xff);
  buf.push_back((data >> 16) & 0xff);
  buf.push_back((data >> 24) & 0xff);
}

// Push 8 bytes on 64-bit systems; 4 on 32-bit systems.
static void PushPointer(std::vector<uint8_t>&buf, void const* pointer) {
  uintptr_t data = reinterpret_cast<uintptr_t>(pointer);
  if (sizeof(void*) == sizeof(uint64_t)) {
    PushWord(buf, (data >> (sizeof(void*) * 4)) & 0xFFFFFFFF);
    PushWord(buf, data & 0xFFFFFFFF);
  } else {
    PushWord(buf, data);
  }
}

static void AlignBuffer(std::vector<uint8_t>&buf, size_t offset) {
  while (buf.size() < offset) {
    buf.push_back(0);
  }
}

/* Write the literal pool to the output stream */
void Mir2Lir::InstallLiteralPools() {
  AlignBuffer(code_buffer_, data_offset_);
  LIR* data_lir = literal_list_;
  while (data_lir != NULL) {
    PushWord(code_buffer_, data_lir->operands[0]);
    data_lir = NEXT_LIR(data_lir);
  }
  // Push code and method literals, record offsets for the compiler to patch.
  data_lir = code_literal_list_;
  while (data_lir != NULL) {
    uint32_t target = data_lir->operands[0];
    cu_->compiler_driver->AddCodePatch(cu_->dex_file,
                                       cu_->class_def_idx,
                                       cu_->method_idx,
                                       cu_->invoke_type,
                                       target,
                                       static_cast<InvokeType>(data_lir->operands[1]),
                                       code_buffer_.size());
    const DexFile::MethodId& id = cu_->dex_file->GetMethodId(target);
    // unique value based on target to ensure code deduplication works
    PushPointer(code_buffer_, &id);
    data_lir = NEXT_LIR(data_lir);
  }
  data_lir = method_literal_list_;
  while (data_lir != NULL) {
    uint32_t target = data_lir->operands[0];
    cu_->compiler_driver->AddMethodPatch(cu_->dex_file,
                                         cu_->class_def_idx,
                                         cu_->method_idx,
                                         cu_->invoke_type,
                                         target,
                                         static_cast<InvokeType>(data_lir->operands[1]),
                                         code_buffer_.size());
    const DexFile::MethodId& id = cu_->dex_file->GetMethodId(target);
    // unique value based on target to ensure code deduplication works
    PushPointer(code_buffer_, &id);
    data_lir = NEXT_LIR(data_lir);
  }
}

/* Write the switch tables to the output stream */
void Mir2Lir::InstallSwitchTables() {
  GrowableArray<SwitchTable*>::Iterator iterator(&switch_tables_);
  while (true) {
    Mir2Lir::SwitchTable* tab_rec = iterator.Next();
    if (tab_rec == NULL) break;
    AlignBuffer(code_buffer_, tab_rec->offset);
    /*
     * For Arm, our reference point is the address of the bx
     * instruction that does the launch, so we have to subtract
     * the auto pc-advance. For other targets the reference point
     * is a label, so we can use the offset as-is.
     */
    int bx_offset = INVALID_OFFSET;
    switch (cu_->instruction_set) {
      case kThumb2:
        DCHECK(tab_rec->anchor->flags.fixup != kFixupNone);
        bx_offset = tab_rec->anchor->offset + 4;
        break;
      case kX86:
        bx_offset = 0;
        break;
      case kMips:
        bx_offset = tab_rec->anchor->offset;
        break;
      default: LOG(FATAL) << "Unexpected instruction set: " << cu_->instruction_set;
    }
    if (cu_->verbose) {
      LOG(INFO) << "Switch table for offset 0x" << std::hex << bx_offset;
    }
    if (tab_rec->table[0] == Instruction::kSparseSwitchSignature) {
      const int32_t* keys = reinterpret_cast<const int32_t*>(&(tab_rec->table[2]));
      for (int elems = 0; elems < tab_rec->table[1]; elems++) {
        int disp = tab_rec->targets[elems]->offset - bx_offset;
        if (cu_->verbose) {
          LOG(INFO) << " Case[" << elems << "] key: 0x"
                    << std::hex << keys[elems] << ", disp: 0x"
                    << std::hex << disp;
        }
        PushWord(code_buffer_, keys[elems]);
        PushWord(code_buffer_,
                 tab_rec->targets[elems]->offset - bx_offset);
      }
    } else {
      DCHECK_EQ(static_cast<int>(tab_rec->table[0]),
                static_cast<int>(Instruction::kPackedSwitchSignature));
      for (int elems = 0; elems < tab_rec->table[1]; elems++) {
        int disp = tab_rec->targets[elems]->offset - bx_offset;
        if (cu_->verbose) {
          LOG(INFO) << " Case[" << elems << "] disp: 0x"
                    << std::hex << disp;
        }
        PushWord(code_buffer_, tab_rec->targets[elems]->offset - bx_offset);
      }
    }
  }
}

/* Write the fill array data to the output stream */
void Mir2Lir::InstallFillArrayData() {
  GrowableArray<FillArrayData*>::Iterator iterator(&fill_array_data_);
  while (true) {
    Mir2Lir::FillArrayData *tab_rec = iterator.Next();
    if (tab_rec == NULL) break;
    AlignBuffer(code_buffer_, tab_rec->offset);
    for (int i = 0; i < (tab_rec->size + 1) / 2; i++) {
      code_buffer_.push_back(tab_rec->table[i] & 0xFF);
      code_buffer_.push_back((tab_rec->table[i] >> 8) & 0xFF);
    }
  }
}

static int AssignLiteralOffsetCommon(LIR* lir, CodeOffset offset) {
  for (; lir != NULL; lir = lir->next) {
    lir->offset = offset;
    offset += 4;
  }
  return offset;
}

static int AssignLiteralPointerOffsetCommon(LIR* lir, CodeOffset offset) {
  unsigned int element_size = sizeof(void*);
  // Align to natural pointer size.
  offset = (offset + (element_size - 1)) & ~(element_size - 1);
  for (; lir != NULL; lir = lir->next) {
    lir->offset = offset;
    offset += element_size;
  }
  return offset;
}

// Make sure we have a code address for every declared catch entry
bool Mir2Lir::VerifyCatchEntries() {
  MappingTable table(&encoded_mapping_table_[0]);
  std::vector<uint32_t> dex_pcs;
  dex_pcs.reserve(table.DexToPcSize());
  for (auto it = table.DexToPcBegin(), end = table.DexToPcEnd(); it != end; ++it) {
    dex_pcs.push_back(it.DexPc());
  }
  // Sort dex_pcs, so that we can quickly check it against the ordered mir_graph_->catches_.
  std::sort(dex_pcs.begin(), dex_pcs.end());

  bool success = true;
  auto it = dex_pcs.begin(), end = dex_pcs.end();
  for (uint32_t dex_pc : mir_graph_->catches_) {
    while (it != end && *it < dex_pc) {
      LOG(INFO) << "Unexpected catch entry @ dex pc 0x" << std::hex << *it;
      ++it;
      success = false;
    }
    if (it == end || *it > dex_pc) {
      LOG(INFO) << "Missing native PC for catch entry @ 0x" << std::hex << dex_pc;
      success = false;
    } else {
      ++it;
    }
  }
  if (!success) {
    LOG(INFO) << "Bad dex2pcMapping table in " << PrettyMethod(cu_->method_idx, *cu_->dex_file);
    LOG(INFO) << "Entries @ decode: " << mir_graph_->catches_.size() << ", Entries in table: "
              << table.DexToPcSize();
  }
  return success;
}

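/*
 * Build the encoded mapping table the runtime uses to translate between native
 * PCs and dex PCs. Layout, as written below:
 *   ULEB128 total number of entries (pc2dex + dex2pc)
 *   ULEB128 number of pc2dex entries
 *   pc2dex entries: (ULEB128 native PC delta, SLEB128 dex PC delta) pairs
 *   dex2pc entries: same delta encoding
 * pc2dex entries come from kPseudoSafepointPC LIRs and dex2pc entries (used
 * for catch handlers) from kPseudoExportedPC LIRs. The table is built in two
 * passes: one to size the buffer, one to write it.
 */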
void Mir2Lir::CreateMappingTables() {
  uint32_t pc2dex_data_size = 0u;
  uint32_t pc2dex_entries = 0u;
  uint32_t pc2dex_offset = 0u;
  uint32_t pc2dex_dalvik_offset = 0u;
  uint32_t dex2pc_data_size = 0u;
  uint32_t dex2pc_entries = 0u;
  uint32_t dex2pc_offset = 0u;
  uint32_t dex2pc_dalvik_offset = 0u;
  for (LIR* tgt_lir = first_lir_insn_; tgt_lir != NULL; tgt_lir = NEXT_LIR(tgt_lir)) {
    if (!tgt_lir->flags.is_nop && (tgt_lir->opcode == kPseudoSafepointPC)) {
      pc2dex_entries += 1;
      DCHECK(pc2dex_offset <= tgt_lir->offset);
      pc2dex_data_size += UnsignedLeb128Size(tgt_lir->offset - pc2dex_offset);
      pc2dex_data_size += SignedLeb128Size(static_cast<int32_t>(tgt_lir->dalvik_offset) -
                                           static_cast<int32_t>(pc2dex_dalvik_offset));
      pc2dex_offset = tgt_lir->offset;
      pc2dex_dalvik_offset = tgt_lir->dalvik_offset;
    }
    if (!tgt_lir->flags.is_nop && (tgt_lir->opcode == kPseudoExportedPC)) {
      dex2pc_entries += 1;
      DCHECK(dex2pc_offset <= tgt_lir->offset);
      dex2pc_data_size += UnsignedLeb128Size(tgt_lir->offset - dex2pc_offset);
      dex2pc_data_size += SignedLeb128Size(static_cast<int32_t>(tgt_lir->dalvik_offset) -
                                           static_cast<int32_t>(dex2pc_dalvik_offset));
      dex2pc_offset = tgt_lir->offset;
      dex2pc_dalvik_offset = tgt_lir->dalvik_offset;
    }
  }

  uint32_t total_entries = pc2dex_entries + dex2pc_entries;
  uint32_t hdr_data_size = UnsignedLeb128Size(total_entries) + UnsignedLeb128Size(pc2dex_entries);
  uint32_t data_size = hdr_data_size + pc2dex_data_size + dex2pc_data_size;
  encoded_mapping_table_.resize(data_size);
  uint8_t* write_pos = &encoded_mapping_table_[0];
  write_pos = EncodeUnsignedLeb128(write_pos, total_entries);
  write_pos = EncodeUnsignedLeb128(write_pos, pc2dex_entries);
  DCHECK_EQ(static_cast<size_t>(write_pos - &encoded_mapping_table_[0]), hdr_data_size);
  uint8_t* write_pos2 = write_pos + pc2dex_data_size;

  pc2dex_offset = 0u;
  pc2dex_dalvik_offset = 0u;
  dex2pc_offset = 0u;
  dex2pc_dalvik_offset = 0u;
  for (LIR* tgt_lir = first_lir_insn_; tgt_lir != NULL; tgt_lir = NEXT_LIR(tgt_lir)) {
    if (!tgt_lir->flags.is_nop && (tgt_lir->opcode == kPseudoSafepointPC)) {
      DCHECK(pc2dex_offset <= tgt_lir->offset);
      write_pos = EncodeUnsignedLeb128(write_pos, tgt_lir->offset - pc2dex_offset);
      write_pos = EncodeSignedLeb128(write_pos, static_cast<int32_t>(tgt_lir->dalvik_offset) -
                                     static_cast<int32_t>(pc2dex_dalvik_offset));
      pc2dex_offset = tgt_lir->offset;
      pc2dex_dalvik_offset = tgt_lir->dalvik_offset;
    }
    if (!tgt_lir->flags.is_nop && (tgt_lir->opcode == kPseudoExportedPC)) {
      DCHECK(dex2pc_offset <= tgt_lir->offset);
      write_pos2 = EncodeUnsignedLeb128(write_pos2, tgt_lir->offset - dex2pc_offset);
      write_pos2 = EncodeSignedLeb128(write_pos2, static_cast<int32_t>(tgt_lir->dalvik_offset) -
                                      static_cast<int32_t>(dex2pc_dalvik_offset));
      dex2pc_offset = tgt_lir->offset;
      dex2pc_dalvik_offset = tgt_lir->dalvik_offset;
    }
  }
  DCHECK_EQ(static_cast<size_t>(write_pos - &encoded_mapping_table_[0]),
            hdr_data_size + pc2dex_data_size);
  DCHECK_EQ(static_cast<size_t>(write_pos2 - &encoded_mapping_table_[0]), data_size);

  if (kIsDebugBuild) {
    CHECK(VerifyCatchEntries());

    // Verify the encoded table holds the expected data.
    MappingTable table(&encoded_mapping_table_[0]);
    CHECK_EQ(table.TotalSize(), total_entries);
    CHECK_EQ(table.PcToDexSize(), pc2dex_entries);
    auto it = table.PcToDexBegin();
    auto it2 = table.DexToPcBegin();
    for (LIR* tgt_lir = first_lir_insn_; tgt_lir != NULL; tgt_lir = NEXT_LIR(tgt_lir)) {
      if (!tgt_lir->flags.is_nop && (tgt_lir->opcode == kPseudoSafepointPC)) {
        CHECK_EQ(tgt_lir->offset, it.NativePcOffset());
        CHECK_EQ(tgt_lir->dalvik_offset, it.DexPc());
        ++it;
      }
      if (!tgt_lir->flags.is_nop && (tgt_lir->opcode == kPseudoExportedPC)) {
        CHECK_EQ(tgt_lir->offset, it2.NativePcOffset());
        CHECK_EQ(tgt_lir->dalvik_offset, it2.DexPc());
        ++it2;
      }
    }
    CHECK(it == table.PcToDexEnd());
    CHECK(it2 == table.DexToPcEnd());
  }
}

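/*
 * Helper for building the native GC map: an open-addressed hash table keyed by
 * native PC offset whose payload is the verifier's reference bitmap for the
 * corresponding dex PC. Header layout (4 bytes, as written in the constructor):
 *   byte 0, bits 0-2: bytes per native offset; bits 3-7: low bits of bitmap width
 *   byte 1: remaining bits of the bitmap width
 *   bytes 2-3: number of entries, little-endian
 * Collisions are resolved by linear probing (see AddEntry).
 */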
class NativePcToReferenceMapBuilder {
 public:
  NativePcToReferenceMapBuilder(std::vector<uint8_t>* table,
                                size_t entries, uint32_t max_native_offset,
                                size_t references_width) : entries_(entries),
                                references_width_(references_width), in_use_(entries),
                                table_(table) {
    // Compute width in bytes needed to hold max_native_offset.
    native_offset_width_ = 0;
    while (max_native_offset != 0) {
      native_offset_width_++;
      max_native_offset >>= 8;
    }
    // Resize table and set up header.
    table->resize((EntryWidth() * entries) + sizeof(uint32_t));
    CHECK_LT(native_offset_width_, 1U << 3);
    (*table)[0] = native_offset_width_ & 7;
    CHECK_LT(references_width_, 1U << 13);
    (*table)[0] |= (references_width_ << 3) & 0xFF;
    (*table)[1] = (references_width_ >> 5) & 0xFF;
    CHECK_LT(entries, 1U << 16);
    (*table)[2] = entries & 0xFF;
    (*table)[3] = (entries >> 8) & 0xFF;
  }

  void AddEntry(uint32_t native_offset, const uint8_t* references) {
    size_t table_index = TableIndex(native_offset);
    while (in_use_[table_index]) {
      table_index = (table_index + 1) % entries_;
    }
    in_use_[table_index] = true;
    SetCodeOffset(table_index, native_offset);
    DCHECK_EQ(native_offset, GetCodeOffset(table_index));
    SetReferences(table_index, references);
  }

 private:
  size_t TableIndex(uint32_t native_offset) {
    return NativePcOffsetToReferenceMap::Hash(native_offset) % entries_;
  }

  uint32_t GetCodeOffset(size_t table_index) {
    uint32_t native_offset = 0;
    size_t table_offset = (table_index * EntryWidth()) + sizeof(uint32_t);
    for (size_t i = 0; i < native_offset_width_; i++) {
      native_offset |= (*table_)[table_offset + i] << (i * 8);
    }
    return native_offset;
  }

  void SetCodeOffset(size_t table_index, uint32_t native_offset) {
    size_t table_offset = (table_index * EntryWidth()) + sizeof(uint32_t);
    for (size_t i = 0; i < native_offset_width_; i++) {
      (*table_)[table_offset + i] = (native_offset >> (i * 8)) & 0xFF;
    }
  }

  void SetReferences(size_t table_index, const uint8_t* references) {
    size_t table_offset = (table_index * EntryWidth()) + sizeof(uint32_t);
    memcpy(&(*table_)[table_offset + native_offset_width_], references, references_width_);
  }

  size_t EntryWidth() const {
    return native_offset_width_ + references_width_;
  }

  // Number of entries in the table.
  const size_t entries_;
  // Number of bytes used to encode the reference bitmap.
  const size_t references_width_;
  // Number of bytes used to encode a native offset.
  size_t native_offset_width_;
  // Entries that are in use.
  std::vector<bool> in_use_;
  // The table we're building.
  std::vector<uint8_t>* const table_;
};

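/*
 * Walk the pc2dex mapping table and, for each safepoint, pair its native PC
 * offset with the reference bitmap the verifier recorded for that dex PC,
 * producing the native GC map used at runtime.
 */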
void Mir2Lir::CreateNativeGcMap() {
  DCHECK(!encoded_mapping_table_.empty());
  MappingTable mapping_table(&encoded_mapping_table_[0]);
  uint32_t max_native_offset = 0;
  for (auto it = mapping_table.PcToDexBegin(), end = mapping_table.PcToDexEnd(); it != end; ++it) {
    uint32_t native_offset = it.NativePcOffset();
    if (native_offset > max_native_offset) {
      max_native_offset = native_offset;
    }
  }
  MethodReference method_ref(cu_->dex_file, cu_->method_idx);
  const std::vector<uint8_t>* gc_map_raw =
      cu_->compiler_driver->GetVerifiedMethodsData()->GetDexGcMap(method_ref);
  verifier::DexPcToReferenceMap dex_gc_map(&(*gc_map_raw)[0]);
  DCHECK_EQ(gc_map_raw->size(), dex_gc_map.RawSize());
  // Compute native offset to references size.
  NativePcToReferenceMapBuilder native_gc_map_builder(&native_gc_map_,
                                                      mapping_table.PcToDexSize(),
                                                      max_native_offset, dex_gc_map.RegWidth());

  for (auto it = mapping_table.PcToDexBegin(), end = mapping_table.PcToDexEnd(); it != end; ++it) {
    uint32_t native_offset = it.NativePcOffset();
    uint32_t dex_pc = it.DexPc();
    const uint8_t* references = dex_gc_map.FindBitMap(dex_pc, false);
    CHECK(references != NULL) << "Missing ref for dex pc 0x" << std::hex << dex_pc;
    native_gc_map_builder.AddEntry(native_offset, references);
  }
}

/* Determine the offset of each literal field */
int Mir2Lir::AssignLiteralOffset(CodeOffset offset) {
  offset = AssignLiteralOffsetCommon(literal_list_, offset);
  offset = AssignLiteralPointerOffsetCommon(code_literal_list_, offset);
  offset = AssignLiteralPointerOffsetCommon(method_literal_list_, offset);
  return offset;
}

int Mir2Lir::AssignSwitchTablesOffset(CodeOffset offset) {
  GrowableArray<SwitchTable*>::Iterator iterator(&switch_tables_);
  while (true) {
    Mir2Lir::SwitchTable* tab_rec = iterator.Next();
    if (tab_rec == NULL) break;
    tab_rec->offset = offset;
    if (tab_rec->table[0] == Instruction::kSparseSwitchSignature) {
      offset += tab_rec->table[1] * (sizeof(int) * 2);
    } else {
      DCHECK_EQ(static_cast<int>(tab_rec->table[0]),
                static_cast<int>(Instruction::kPackedSwitchSignature));
      offset += tab_rec->table[1] * sizeof(int);
    }
  }
  return offset;
}

int Mir2Lir::AssignFillArrayDataOffset(CodeOffset offset) {
  GrowableArray<FillArrayData*>::Iterator iterator(&fill_array_data_);
  while (true) {
    Mir2Lir::FillArrayData *tab_rec = iterator.Next();
    if (tab_rec == NULL) break;
    tab_rec->offset = offset;
    offset += tab_rec->size;
    // word align
    offset = (offset + 3) & ~3;
  }
  return offset;
}

/*
 * Insert a kPseudoCaseLabel at the beginning of the Dalvik
 * offset vaddr if pretty-printing, otherwise use the standard block
 * label. The selected label will be used to fix up the case
 * branch table during the assembly phase. All resource flags
 * are set to prevent code motion. KeyVal is just there for debugging.
 */
LIR* Mir2Lir::InsertCaseLabel(DexOffset vaddr, int keyVal) {
  LIR* boundary_lir = &block_label_list_[mir_graph_->FindBlock(vaddr)->id];
  LIR* res = boundary_lir;
  if (cu_->verbose) {
    // Only pay the expense if we're pretty-printing.
    LIR* new_label = static_cast<LIR*>(arena_->Alloc(sizeof(LIR), ArenaAllocator::kAllocLIR));
    new_label->dalvik_offset = vaddr;
    new_label->opcode = kPseudoCaseLabel;
    new_label->operands[0] = keyVal;
    new_label->flags.fixup = kFixupLabel;
    DCHECK(!new_label->flags.use_def_invalid);
    new_label->u.m.def_mask = ENCODE_ALL;
    InsertLIRAfter(boundary_lir, new_label);
    res = new_label;
  }
  return res;
}

void Mir2Lir::MarkPackedCaseLabels(Mir2Lir::SwitchTable* tab_rec) {
  const uint16_t* table = tab_rec->table;
  DexOffset base_vaddr = tab_rec->vaddr;
  const int32_t *targets = reinterpret_cast<const int32_t*>(&table[4]);
  int entries = table[1];
  int low_key = s4FromSwitchData(&table[2]);
  for (int i = 0; i < entries; i++) {
    tab_rec->targets[i] = InsertCaseLabel(base_vaddr + targets[i], i + low_key);
  }
}

void Mir2Lir::MarkSparseCaseLabels(Mir2Lir::SwitchTable* tab_rec) {
  const uint16_t* table = tab_rec->table;
  DexOffset base_vaddr = tab_rec->vaddr;
  int entries = table[1];
  const int32_t* keys = reinterpret_cast<const int32_t*>(&table[2]);
  const int32_t* targets = &keys[entries];
  for (int i = 0; i < entries; i++) {
    tab_rec->targets[i] = InsertCaseLabel(base_vaddr + targets[i], keys[i]);
  }
}

void Mir2Lir::ProcessSwitchTables() {
  GrowableArray<SwitchTable*>::Iterator iterator(&switch_tables_);
  while (true) {
    Mir2Lir::SwitchTable *tab_rec = iterator.Next();
    if (tab_rec == NULL) break;
    if (tab_rec->table[0] == Instruction::kPackedSwitchSignature) {
      MarkPackedCaseLabels(tab_rec);
    } else if (tab_rec->table[0] == Instruction::kSparseSwitchSignature) {
      MarkSparseCaseLabels(tab_rec);
    } else {
      LOG(FATAL) << "Invalid switch table";
    }
  }
}

void Mir2Lir::DumpSparseSwitchTable(const uint16_t* table) {
  /*
   * Sparse switch data format:
   *  ushort ident = 0x0200   magic value
   *  ushort size             number of entries in the table; > 0
   *  int keys[size]          keys, sorted low-to-high; 32-bit aligned
   *  int targets[size]       branch targets, relative to switch opcode
   *
   * Total size is (2+size*4) 16-bit code units.
   */
  uint16_t ident = table[0];
  int entries = table[1];
  const int32_t* keys = reinterpret_cast<const int32_t*>(&table[2]);
  const int32_t* targets = &keys[entries];
  LOG(INFO) << "Sparse switch table - ident:0x" << std::hex << ident
            << ", entries: " << std::dec << entries;
  for (int i = 0; i < entries; i++) {
    LOG(INFO) << " Key[" << keys[i] << "] -> 0x" << std::hex << targets[i];
  }
}

void Mir2Lir::DumpPackedSwitchTable(const uint16_t* table) {
  /*
   * Packed switch data format:
   *  ushort ident = 0x0100   magic value
   *  ushort size             number of entries in the table
   *  int first_key           first (and lowest) switch case value
   *  int targets[size]       branch targets, relative to switch opcode
   *
   * Total size is (4+size*2) 16-bit code units.
   */
  uint16_t ident = table[0];
  const int32_t* targets = reinterpret_cast<const int32_t*>(&table[4]);
  int entries = table[1];
  int low_key = s4FromSwitchData(&table[2]);
  LOG(INFO) << "Packed switch table - ident:0x" << std::hex << ident
            << ", entries: " << std::dec << entries << ", low_key: " << low_key;
  for (int i = 0; i < entries; i++) {
    LOG(INFO) << " Key[" << (i + low_key) << "] -> 0x" << std::hex
              << targets[i];
  }
}

/* Set up special LIR to mark a Dalvik byte-code instruction start for pretty printing */
void Mir2Lir::MarkBoundary(DexOffset offset, const char* inst_str) {
  // NOTE: only used for debug listings.
  NewLIR1(kPseudoDalvikByteCodeBoundary, WrapPointer(ArenaStrdup(inst_str)));
}

bool Mir2Lir::EvaluateBranch(Instruction::Code opcode, int32_t src1, int32_t src2) {
  bool is_taken;
  switch (opcode) {
    case Instruction::IF_EQ: is_taken = (src1 == src2); break;
    case Instruction::IF_NE: is_taken = (src1 != src2); break;
    case Instruction::IF_LT: is_taken = (src1 < src2); break;
    case Instruction::IF_GE: is_taken = (src1 >= src2); break;
    case Instruction::IF_GT: is_taken = (src1 > src2); break;
    case Instruction::IF_LE: is_taken = (src1 <= src2); break;
    case Instruction::IF_EQZ: is_taken = (src1 == 0); break;
    case Instruction::IF_NEZ: is_taken = (src1 != 0); break;
    case Instruction::IF_LTZ: is_taken = (src1 < 0); break;
    case Instruction::IF_GEZ: is_taken = (src1 >= 0); break;
    case Instruction::IF_GTZ: is_taken = (src1 > 0); break;
    case Instruction::IF_LEZ: is_taken = (src1 <= 0); break;
    default:
      LOG(FATAL) << "Unexpected opcode " << opcode;
      is_taken = false;
  }
  return is_taken;
}

// Convert relation of src1/src2 to src2/src1
ConditionCode Mir2Lir::FlipComparisonOrder(ConditionCode before) {
  ConditionCode res;
  switch (before) {
    case kCondEq: res = kCondEq; break;
    case kCondNe: res = kCondNe; break;
    case kCondLt: res = kCondGt; break;
    case kCondGt: res = kCondLt; break;
    case kCondLe: res = kCondGe; break;
    case kCondGe: res = kCondLe; break;
    default:
      res = static_cast<ConditionCode>(0);
      LOG(FATAL) << "Unexpected ccode " << before;
  }
  return res;
}

// TODO: move to mir_to_lir.cc
Mir2Lir::Mir2Lir(CompilationUnit* cu, MIRGraph* mir_graph, ArenaAllocator* arena)
    : Backend(arena),
      literal_list_(NULL),
      method_literal_list_(NULL),
      code_literal_list_(NULL),
      first_fixup_(NULL),
      cu_(cu),
      mir_graph_(mir_graph),
      switch_tables_(arena, 4, kGrowableArraySwitchTables),
      fill_array_data_(arena, 4, kGrowableArrayFillArrayData),
      throw_launchpads_(arena, 2048, kGrowableArrayThrowLaunchPads),
      suspend_launchpads_(arena, 4, kGrowableArraySuspendLaunchPads),
      intrinsic_launchpads_(arena, 2048, kGrowableArrayMisc),
      tempreg_info_(arena, 20, kGrowableArrayMisc),
      reginfo_map_(arena, 64, kGrowableArrayMisc),
      pointer_storage_(arena, 128, kGrowableArrayMisc),
      data_offset_(0),
      total_size_(0),
      block_label_list_(NULL),
      current_dalvik_offset_(0),
      estimated_native_code_size_(0),
      reg_pool_(NULL),
      live_sreg_(0),
      num_core_spills_(0),
      num_fp_spills_(0),
      frame_size_(0),
      core_spill_mask_(0),
      fp_spill_mask_(0),
      first_lir_insn_(NULL),
      last_lir_insn_(NULL) {
  promotion_map_ = static_cast<PromotionMap*>
      (arena_->Alloc((cu_->num_dalvik_registers + cu_->num_compiler_temps + 1) *
                      sizeof(promotion_map_[0]), ArenaAllocator::kAllocRegAlloc));
  // Reserve pointer id 0 for NULL.
  size_t null_idx = WrapPointer(NULL);
  DCHECK_EQ(null_idx, 0U);
}

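/*
 * Top-level driver for the backend: allocate registers, try the special-case
 * code generators, fall back to the full MIR-to-LIR conversion if they decline,
 * then resolve switch-table targets and assemble the LIR into machine code.
 */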
void Mir2Lir::Materialize() {
  cu_->NewTimingSplit("RegisterAllocation");
  CompilerInitializeRegAlloc();  // Needs to happen after SSA naming

  /* Allocate Registers using simple local allocation scheme */
  SimpleRegAlloc();

  /*
   * Custom codegen for special cases. If for any reason the
   * special codegen doesn't succeed, first_lir_insn_ will be
   * set to NULL.
   */
  // TODO: Clean up GenSpecial() and return true only if special implementation is emitted.
  // Currently, GenSpecial() returns IsSpecial() but doesn't check after SpecialMIR2LIR().
  DCHECK(cu_->compiler_driver->GetMethodInlinerMap() != nullptr);
  cu_->compiler_driver->GetMethodInlinerMap()->GetMethodInliner(cu_->dex_file)
      ->GenSpecial(this, cu_->method_idx);

  /* Convert MIR to LIR, etc. */
  if (first_lir_insn_ == NULL) {
    MethodMIR2LIR();
  }

  /* Method is not empty */
  if (first_lir_insn_) {
    // mark the targets of switch statement case labels
    ProcessSwitchTables();

    /* Convert LIR into machine code. */
    AssembleLIR();

    if (cu_->verbose) {
      CodegenDump();
    }
  }
}

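/*
 * Package the compiled code for the driver. The vmap table lists the promoted
 * Dalvik registers (core registers first, then an INVALID_VREG marker standing
 * in for lr when a frame was built, then FP registers) and is LEB128-encoded
 * with a leading element count.
 */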
CompiledMethod* Mir2Lir::GetCompiledMethod() {
  // Combine vmap tables - core regs, then fp regs - into vmap_table
  std::vector<uint16_t> raw_vmap_table;
  // Core regs may have been inserted out of order - sort first
  std::sort(core_vmap_table_.begin(), core_vmap_table_.end());
  for (size_t i = 0; i < core_vmap_table_.size(); ++i) {
    // Copy, stripping out the phys register sort key
    raw_vmap_table.push_back(~(-1 << VREG_NUM_WIDTH) & core_vmap_table_[i]);
  }
  // If we have a frame, push a marker to take place of lr
  if (frame_size_ > 0) {
    raw_vmap_table.push_back(INVALID_VREG);
  } else {
    DCHECK_EQ(__builtin_popcount(core_spill_mask_), 0);
    DCHECK_EQ(__builtin_popcount(fp_spill_mask_), 0);
  }
  // Combine vmap tables - core regs, then fp regs. fp regs already sorted
  for (uint32_t i = 0; i < fp_vmap_table_.size(); i++) {
    raw_vmap_table.push_back(fp_vmap_table_[i]);
  }
  Leb128EncodingVector vmap_encoder;
  // Prefix the encoded data with its size.
  vmap_encoder.PushBackUnsigned(raw_vmap_table.size());
  for (uint16_t cur : raw_vmap_table) {
    vmap_encoder.PushBackUnsigned(cur);
  }
  CompiledMethod* result =
      new CompiledMethod(*cu_->compiler_driver, cu_->instruction_set, code_buffer_, frame_size_,
                         core_spill_mask_, fp_spill_mask_, encoded_mapping_table_,
                         vmap_encoder.GetData(), native_gc_map_);
  return result;
}

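/*
 * Frame size = spilled core and FP registers, a filler word, locals (num_regs),
 * outgoing args, compiler temps and the cur_method* slot, each one word wide,
 * rounded up to kStackAlignment.
 */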
int Mir2Lir::ComputeFrameSize() {
  /* Figure out the frame size */
  static const uint32_t kAlignMask = kStackAlignment - 1;
  uint32_t size = (num_core_spills_ + num_fp_spills_ +
                   1 /* filler word */ + cu_->num_regs + cu_->num_outs +
                   cu_->num_compiler_temps + 1 /* cur_method* */)
                   * sizeof(uint32_t);
  /* Align and set */
  return (size + kAlignMask) & ~(kAlignMask);
}

/*
 * Append an LIR instruction to the LIR list maintained by a compilation
 * unit
 */
void Mir2Lir::AppendLIR(LIR* lir) {
  if (first_lir_insn_ == NULL) {
    DCHECK(last_lir_insn_ == NULL);
    last_lir_insn_ = first_lir_insn_ = lir;
    lir->prev = lir->next = NULL;
  } else {
    last_lir_insn_->next = lir;
    lir->prev = last_lir_insn_;
    lir->next = NULL;
    last_lir_insn_ = lir;
  }
}

/*
 * Insert an LIR instruction before the current instruction, which cannot be the
 * first instruction.
 *
 * prev_lir <-> new_lir <-> current_lir
 */
void Mir2Lir::InsertLIRBefore(LIR* current_lir, LIR* new_lir) {
  DCHECK(current_lir->prev != NULL);
  LIR *prev_lir = current_lir->prev;

  prev_lir->next = new_lir;
  new_lir->prev = prev_lir;
  new_lir->next = current_lir;
  current_lir->prev = new_lir;
}

/*
 * Insert an LIR instruction after the current instruction, which cannot be the
 * last instruction.
 *
 * current_lir -> new_lir -> old_next
 */
void Mir2Lir::InsertLIRAfter(LIR* current_lir, LIR* new_lir) {
  new_lir->prev = current_lir;
  new_lir->next = current_lir->next;
  current_lir->next = new_lir;
  new_lir->next->prev = new_lir;
}

}  // namespace art