/*
 * Copyright (C) 2011 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "dex/compiler_internals.h"
#include "dex_file-inl.h"
#include "gc_map.h"
#include "mapping_table.h"
#include "mir_to_lir-inl.h"
#include "verifier/dex_gc_map.h"
#include "verifier/method_verifier.h"

namespace art {

bool Mir2Lir::IsInexpensiveConstant(RegLocation rl_src) {
  bool res = false;
  if (rl_src.is_const) {
    if (rl_src.wide) {
      if (rl_src.fp) {
        res = InexpensiveConstantDouble(mir_graph_->ConstantValueWide(rl_src));
      } else {
        res = InexpensiveConstantLong(mir_graph_->ConstantValueWide(rl_src));
      }
    } else {
      if (rl_src.fp) {
        res = InexpensiveConstantFloat(mir_graph_->ConstantValue(rl_src));
      } else {
        res = InexpensiveConstantInt(mir_graph_->ConstantValue(rl_src));
      }
    }
  }
  return res;
}

void Mir2Lir::MarkSafepointPC(LIR* inst) {
  inst->def_mask = ENCODE_ALL;
  LIR* safepoint_pc = NewLIR0(kPseudoSafepointPC);
  DCHECK_EQ(safepoint_pc->def_mask, ENCODE_ALL);
}

bool Mir2Lir::FastInstance(uint32_t field_idx, int& field_offset, bool& is_volatile, bool is_put) {
  return cu_->compiler_driver->ComputeInstanceFieldInfo(
      field_idx, mir_graph_->GetCurrentDexCompilationUnit(), field_offset, is_volatile, is_put);
}

/* Convert an instruction to a NOP */
void Mir2Lir::NopLIR(LIR* lir) {
  lir->flags.is_nop = true;
}

void Mir2Lir::SetMemRefType(LIR* lir, bool is_load, int mem_type) {
  uint64_t *mask_ptr;
  uint64_t mask = ENCODE_MEM;
  DCHECK(GetTargetInstFlags(lir->opcode) & (IS_LOAD | IS_STORE));
  if (is_load) {
    mask_ptr = &lir->use_mask;
  } else {
    mask_ptr = &lir->def_mask;
  }
  /* Clear out the memref flags */
  *mask_ptr &= ~mask;
  /* ..and then add back the one we need */
  switch (mem_type) {
    case kLiteral:
      DCHECK(is_load);
      *mask_ptr |= ENCODE_LITERAL;
      break;
    case kDalvikReg:
      *mask_ptr |= ENCODE_DALVIK_REG;
      break;
    case kHeapRef:
      *mask_ptr |= ENCODE_HEAP_REF;
      break;
    case kMustNotAlias:
      /* Currently only loads can be marked as kMustNotAlias */
      DCHECK(!(GetTargetInstFlags(lir->opcode) & IS_STORE));
      *mask_ptr |= ENCODE_MUST_NOT_ALIAS;
      break;
    default:
      LOG(FATAL) << "Oat: invalid memref kind - " << mem_type;
  }
}

/*
 * Mark load/store instructions that access Dalvik registers through the stack.
 */
void Mir2Lir::AnnotateDalvikRegAccess(LIR* lir, int reg_id, bool is_load,
                                      bool is64bit) {
  SetMemRefType(lir, is_load, kDalvikReg);

  /*
   * Store the Dalvik register id in alias_info. Mark the MSB if it is a 64-bit
   * access.
   */
  lir->alias_info = ENCODE_ALIAS_INFO(reg_id, is64bit);
}

/*
 * Debugging macros
 */
#define DUMP_RESOURCE_MASK(X)

/* Pretty-print a LIR instruction */
void Mir2Lir::DumpLIRInsn(LIR* lir, unsigned char* base_addr) {
  int offset = lir->offset;
  int dest = lir->operands[0];
  const bool dump_nop = (cu_->enable_debug & (1 << kDebugShowNops));

  /* Handle pseudo-ops individually, and all regular insns as a group */
  switch (lir->opcode) {
    case kPseudoMethodEntry:
      LOG(INFO) << "-------- method entry "
                << PrettyMethod(cu_->method_idx, *cu_->dex_file);
      break;
    case kPseudoMethodExit:
      LOG(INFO) << "-------- Method_Exit";
      break;
    case kPseudoBarrier:
      LOG(INFO) << "-------- BARRIER";
      break;
    case kPseudoEntryBlock:
      LOG(INFO) << "-------- entry offset: 0x" << std::hex << dest;
      break;
    case kPseudoDalvikByteCodeBoundary:
      if (lir->operands[0] == 0) {
        lir->operands[0] = reinterpret_cast<uintptr_t>("No instruction string");
      }
      LOG(INFO) << "-------- dalvik offset: 0x" << std::hex
                << lir->dalvik_offset << " @ " << reinterpret_cast<char*>(lir->operands[0]);
      break;
    case kPseudoExitBlock:
      LOG(INFO) << "-------- exit offset: 0x" << std::hex << dest;
      break;
    case kPseudoPseudoAlign4:
      LOG(INFO) << reinterpret_cast<uintptr_t>(base_addr) + offset << " (0x" << std::hex
                << offset << "): .align4";
      break;
    case kPseudoEHBlockLabel:
      LOG(INFO) << "Exception_Handling:";
      break;
    case kPseudoTargetLabel:
    case kPseudoNormalBlockLabel:
      LOG(INFO) << "L" << reinterpret_cast<void*>(lir) << ":";
      break;
    case kPseudoThrowTarget:
      LOG(INFO) << "LT" << reinterpret_cast<void*>(lir) << ":";
      break;
    case kPseudoIntrinsicRetry:
      LOG(INFO) << "IR" << reinterpret_cast<void*>(lir) << ":";
      break;
    case kPseudoSuspendTarget:
      LOG(INFO) << "LS" << reinterpret_cast<void*>(lir) << ":";
      break;
    case kPseudoSafepointPC:
      LOG(INFO) << "LsafepointPC_0x" << std::hex << lir->offset << "_" << lir->dalvik_offset << ":";
      break;
    case kPseudoExportedPC:
      LOG(INFO) << "LexportedPC_0x" << std::hex << lir->offset << "_" << lir->dalvik_offset << ":";
      break;
    case kPseudoCaseLabel:
      LOG(INFO) << "LC" << reinterpret_cast<void*>(lir) << ": Case target 0x"
                << std::hex << lir->operands[0] << "|" << std::dec << lir->operands[0];
      break;
    default:
      if (lir->flags.is_nop && !dump_nop) {
        break;
      } else {
        std::string op_name(BuildInsnString(GetTargetInstName(lir->opcode),
                                            lir, base_addr));
        std::string op_operands(BuildInsnString(GetTargetInstFmt(lir->opcode),
                                                lir, base_addr));
        LOG(INFO) << StringPrintf("%05x: %-9s%s%s",
                                  reinterpret_cast<unsigned int>(base_addr + offset),
                                  op_name.c_str(), op_operands.c_str(),
                                  lir->flags.is_nop ? "(nop)" : "");
      }
      break;
  }

  if (lir->use_mask && (!lir->flags.is_nop || dump_nop)) {
    DUMP_RESOURCE_MASK(DumpResourceMask(lir, lir->use_mask, "use"));
  }
  if (lir->def_mask && (!lir->flags.is_nop || dump_nop)) {
    DUMP_RESOURCE_MASK(DumpResourceMask(lir, lir->def_mask, "def"));
  }
}

void Mir2Lir::DumpPromotionMap() {
  int num_regs = cu_->num_dalvik_registers + cu_->num_compiler_temps + 1;
  for (int i = 0; i < num_regs; i++) {
    PromotionMap v_reg_map = promotion_map_[i];
    std::string buf;
    if (v_reg_map.fp_location == kLocPhysReg) {
      StringAppendF(&buf, " : s%d", v_reg_map.FpReg & FpRegMask());
    }

    std::string buf3;
    if (i < cu_->num_dalvik_registers) {
      StringAppendF(&buf3, "%02d", i);
    } else if (i == mir_graph_->GetMethodSReg()) {
      buf3 = "Method*";
    } else {
      StringAppendF(&buf3, "ct%d", i - cu_->num_dalvik_registers);
    }

    LOG(INFO) << StringPrintf("V[%s] -> %s%d%s", buf3.c_str(),
                              v_reg_map.core_location == kLocPhysReg ?
                              "r" : "SP+", v_reg_map.core_location == kLocPhysReg ?
                              v_reg_map.core_reg : SRegOffset(i),
                              buf.c_str());
  }
}

/* Dump a mapping table */
void Mir2Lir::DumpMappingTable(const char* table_name, const std::string& descriptor,
                               const std::string& name, const std::string& signature,
                               const std::vector<uint32_t>& v) {
  if (v.size() > 0) {
    std::string line(StringPrintf("\n %s %s%s_%s_table[%zu] = {", table_name,
                     descriptor.c_str(), name.c_str(), signature.c_str(), v.size()));
    std::replace(line.begin(), line.end(), ';', '_');
    LOG(INFO) << line;
    for (uint32_t i = 0; i < v.size(); i += 2) {
      line = StringPrintf(" {0x%05x, 0x%04x},", v[i], v[i+1]);
      LOG(INFO) << line;
    }
    LOG(INFO) << " };\n\n";
  }
}

/* Dump instructions and constant pool contents */
void Mir2Lir::CodegenDump() {
  LOG(INFO) << "Dumping LIR insns for "
            << PrettyMethod(cu_->method_idx, *cu_->dex_file);
  LIR* lir_insn;
  int insns_size = cu_->code_item->insns_size_in_code_units_;

  LOG(INFO) << "Regs (excluding ins) : " << cu_->num_regs;
  LOG(INFO) << "Ins : " << cu_->num_ins;
  LOG(INFO) << "Outs : " << cu_->num_outs;
  LOG(INFO) << "CoreSpills : " << num_core_spills_;
  LOG(INFO) << "FPSpills : " << num_fp_spills_;
  LOG(INFO) << "CompilerTemps : " << cu_->num_compiler_temps;
  LOG(INFO) << "Frame size : " << frame_size_;
  LOG(INFO) << "code size is " << total_size_ <<
      " bytes, Dalvik size is " << insns_size * 2;
  LOG(INFO) << "expansion factor: "
            << static_cast<float>(total_size_) / static_cast<float>(insns_size * 2);
  DumpPromotionMap();
  for (lir_insn = first_lir_insn_; lir_insn != NULL; lir_insn = lir_insn->next) {
    DumpLIRInsn(lir_insn, 0);
  }
  for (lir_insn = literal_list_; lir_insn != NULL; lir_insn = lir_insn->next) {
    LOG(INFO) << StringPrintf("%x (%04x): .word (%#x)", lir_insn->offset, lir_insn->offset,
                              lir_insn->operands[0]);
  }

  const DexFile::MethodId& method_id =
      cu_->dex_file->GetMethodId(cu_->method_idx);
  std::string signature(cu_->dex_file->GetMethodSignature(method_id));
  std::string name(cu_->dex_file->GetMethodName(method_id));
  std::string descriptor(cu_->dex_file->GetMethodDeclaringClassDescriptor(method_id));

  // Dump mapping tables
  DumpMappingTable("PC2Dex_MappingTable", descriptor, name, signature, pc2dex_mapping_table_);
  DumpMappingTable("Dex2PC_MappingTable", descriptor, name, signature, dex2pc_mapping_table_);
}

/*
 * Search the existing constants in the literal pool for an exact or close match
 * within specified delta (greater or equal to 0).
 */
LIR* Mir2Lir::ScanLiteralPool(LIR* data_target, int value, unsigned int delta) {
  while (data_target) {
    if ((static_cast<unsigned>(value - data_target->operands[0])) <= delta)
      return data_target;
    data_target = data_target->next;
  }
  return NULL;
}

/* Search the existing constants in the literal pool for an exact wide match */
LIR* Mir2Lir::ScanLiteralPoolWide(LIR* data_target, int val_lo, int val_hi) {
  bool lo_match = false;
  LIR* lo_target = NULL;
  while (data_target) {
    if (lo_match && (data_target->operands[0] == val_hi)) {
      // Record high word in case we need to expand this later.
      lo_target->operands[1] = val_hi;
      return lo_target;
    }
    lo_match = false;
    if (data_target->operands[0] == val_lo) {
      lo_match = true;
      lo_target = data_target;
    }
    data_target = data_target->next;
  }
  return NULL;
}

/*
 * The following are building blocks to insert constants into the pool or
 * instruction streams.
 */

/* Add a 32-bit constant to the constant pool */
LIR* Mir2Lir::AddWordData(LIR* *constant_list_p, int value) {
  /* Add the constant to the literal pool */
  if (constant_list_p) {
    LIR* new_value = static_cast<LIR*>(arena_->Alloc(sizeof(LIR), ArenaAllocator::kAllocData));
    new_value->operands[0] = value;
    new_value->next = *constant_list_p;
    *constant_list_p = new_value;
    return new_value;
  }
  return NULL;
}

/* Add a 64-bit constant to the constant pool or mixed with code */
LIR* Mir2Lir::AddWideData(LIR* *constant_list_p, int val_lo, int val_hi) {
  AddWordData(constant_list_p, val_hi);
  return AddWordData(constant_list_p, val_lo);
}

static void PushWord(std::vector<uint8_t>&buf, int data) {
  buf.push_back(data & 0xff);
  buf.push_back((data >> 8) & 0xff);
  buf.push_back((data >> 16) & 0xff);
  buf.push_back((data >> 24) & 0xff);
}

static void AlignBuffer(std::vector<uint8_t>&buf, size_t offset) {
  while (buf.size() < offset) {
    buf.push_back(0);
  }
}

/* Write the literal pool to the output stream */
void Mir2Lir::InstallLiteralPools() {
  AlignBuffer(code_buffer_, data_offset_);
  LIR* data_lir = literal_list_;
  while (data_lir != NULL) {
    PushWord(code_buffer_, data_lir->operands[0]);
    data_lir = NEXT_LIR(data_lir);
  }
  // Push code and method literals, record offsets for the compiler to patch.
  data_lir = code_literal_list_;
  while (data_lir != NULL) {
    uint32_t target = data_lir->operands[0];
    cu_->compiler_driver->AddCodePatch(cu_->dex_file,
                                       cu_->method_idx,
                                       cu_->invoke_type,
                                       target,
                                       static_cast<InvokeType>(data_lir->operands[1]),
                                       code_buffer_.size());
    const DexFile::MethodId& id = cu_->dex_file->GetMethodId(target);
    // unique based on target to ensure code deduplication works
    uint32_t unique_patch_value = reinterpret_cast<uint32_t>(&id);
    PushWord(code_buffer_, unique_patch_value);
    data_lir = NEXT_LIR(data_lir);
  }
  data_lir = method_literal_list_;
  while (data_lir != NULL) {
    uint32_t target = data_lir->operands[0];
    cu_->compiler_driver->AddMethodPatch(cu_->dex_file,
                                         cu_->method_idx,
                                         cu_->invoke_type,
                                         target,
                                         static_cast<InvokeType>(data_lir->operands[1]),
                                         code_buffer_.size());
    const DexFile::MethodId& id = cu_->dex_file->GetMethodId(target);
    // unique based on target to ensure code deduplication works
    uint32_t unique_patch_value = reinterpret_cast<uint32_t>(&id);
    PushWord(code_buffer_, unique_patch_value);
    data_lir = NEXT_LIR(data_lir);
  }
}

/* Write the switch tables to the output stream */
void Mir2Lir::InstallSwitchTables() {
  GrowableArray<SwitchTable*>::Iterator iterator(&switch_tables_);
  while (true) {
    Mir2Lir::SwitchTable* tab_rec = iterator.Next();
    if (tab_rec == NULL) break;
    AlignBuffer(code_buffer_, tab_rec->offset);
    /*
     * For Arm, our reference point is the address of the bx
     * instruction that does the launch, so we have to subtract
     * the auto pc-advance. For other targets the reference point
     * is a label, so we can use the offset as-is.
     */
    int bx_offset = INVALID_OFFSET;
    switch (cu_->instruction_set) {
      case kThumb2:
        bx_offset = tab_rec->anchor->offset + 4;
        break;
      case kX86:
        bx_offset = 0;
        break;
      case kMips:
        bx_offset = tab_rec->anchor->offset;
        break;
      default: LOG(FATAL) << "Unexpected instruction set: " << cu_->instruction_set;
    }
    if (cu_->verbose) {
      LOG(INFO) << "Switch table for offset 0x" << std::hex << bx_offset;
    }
    if (tab_rec->table[0] == Instruction::kSparseSwitchSignature) {
      const int* keys = reinterpret_cast<const int*>(&(tab_rec->table[2]));
      for (int elems = 0; elems < tab_rec->table[1]; elems++) {
        int disp = tab_rec->targets[elems]->offset - bx_offset;
        if (cu_->verbose) {
          LOG(INFO) << " Case[" << elems << "] key: 0x"
                    << std::hex << keys[elems] << ", disp: 0x"
                    << std::hex << disp;
        }
        PushWord(code_buffer_, keys[elems]);
        PushWord(code_buffer_,
                 tab_rec->targets[elems]->offset - bx_offset);
      }
    } else {
      DCHECK_EQ(static_cast<int>(tab_rec->table[0]),
                static_cast<int>(Instruction::kPackedSwitchSignature));
      for (int elems = 0; elems < tab_rec->table[1]; elems++) {
        int disp = tab_rec->targets[elems]->offset - bx_offset;
        if (cu_->verbose) {
          LOG(INFO) << " Case[" << elems << "] disp: 0x"
                    << std::hex << disp;
        }
        PushWord(code_buffer_, tab_rec->targets[elems]->offset - bx_offset);
      }
    }
  }
}

/* Write the fill array data to the output stream */
void Mir2Lir::InstallFillArrayData() {
  GrowableArray<FillArrayData*>::Iterator iterator(&fill_array_data_);
  while (true) {
    Mir2Lir::FillArrayData *tab_rec = iterator.Next();
    if (tab_rec == NULL) break;
    AlignBuffer(code_buffer_, tab_rec->offset);
    for (int i = 0; i < (tab_rec->size + 1) / 2; i++) {
      code_buffer_.push_back(tab_rec->table[i] & 0xFF);
      code_buffer_.push_back((tab_rec->table[i] >> 8) & 0xFF);
    }
  }
}

static int AssignLiteralOffsetCommon(LIR* lir, int offset) {
  for (; lir != NULL; lir = lir->next) {
    lir->offset = offset;
    offset += 4;
  }
  return offset;
}

// Make sure we have a code address for every declared catch entry
bool Mir2Lir::VerifyCatchEntries() {
  bool success = true;
  for (std::set<uint32_t>::const_iterator it = mir_graph_->catches_.begin();
       it != mir_graph_->catches_.end(); ++it) {
    uint32_t dex_pc = *it;
    bool found = false;
    for (size_t i = 0; i < dex2pc_mapping_table_.size(); i += 2) {
      if (dex_pc == dex2pc_mapping_table_[i+1]) {
        found = true;
        break;
      }
    }
    if (!found) {
      LOG(INFO) << "Missing native PC for catch entry @ 0x" << std::hex << dex_pc;
      success = false;
    }
  }
  // Now, try in the other direction
  for (size_t i = 0; i < dex2pc_mapping_table_.size(); i += 2) {
    uint32_t dex_pc = dex2pc_mapping_table_[i+1];
    if (mir_graph_->catches_.find(dex_pc) == mir_graph_->catches_.end()) {
      LOG(INFO) << "Unexpected catch entry @ dex pc 0x" << std::hex << dex_pc;
      success = false;
    }
  }
  if (!success) {
    LOG(INFO) << "Bad dex2pcMapping table in " << PrettyMethod(cu_->method_idx, *cu_->dex_file);
    LOG(INFO) << "Entries @ decode: " << mir_graph_->catches_.size() << ", Entries in table: "
              << dex2pc_mapping_table_.size()/2;
  }
  return success;
}

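// Build the pc2dex and dex2pc mapping tables from kPseudoSafepointPC and
// kPseudoExportedPC markers in the LIR stream, then produce the combined
// encoded_mapping_table_.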
void Mir2Lir::CreateMappingTables() {
  for (LIR* tgt_lir = first_lir_insn_; tgt_lir != NULL; tgt_lir = NEXT_LIR(tgt_lir)) {
    if (!tgt_lir->flags.is_nop && (tgt_lir->opcode == kPseudoSafepointPC)) {
      pc2dex_mapping_table_.push_back(tgt_lir->offset);
      pc2dex_mapping_table_.push_back(tgt_lir->dalvik_offset);
    }
    if (!tgt_lir->flags.is_nop && (tgt_lir->opcode == kPseudoExportedPC)) {
      dex2pc_mapping_table_.push_back(tgt_lir->offset);
      dex2pc_mapping_table_.push_back(tgt_lir->dalvik_offset);
    }
  }
  if (kIsDebugBuild) {
    CHECK(VerifyCatchEntries());
  }
  CHECK_EQ(pc2dex_mapping_table_.size() & 1, 0U);
  CHECK_EQ(dex2pc_mapping_table_.size() & 1, 0U);
  uint32_t total_entries = (pc2dex_mapping_table_.size() + dex2pc_mapping_table_.size()) / 2;
  uint32_t pc2dex_entries = pc2dex_mapping_table_.size() / 2;
  encoded_mapping_table_.PushBack(total_entries);
  encoded_mapping_table_.PushBack(pc2dex_entries);
  encoded_mapping_table_.InsertBack(pc2dex_mapping_table_.begin(), pc2dex_mapping_table_.end());
  encoded_mapping_table_.InsertBack(dex2pc_mapping_table_.begin(), dex2pc_mapping_table_.end());
  if (kIsDebugBuild) {
    // Verify the encoded table holds the expected data.
    MappingTable table(&encoded_mapping_table_.GetData()[0]);
    CHECK_EQ(table.TotalSize(), total_entries);
    CHECK_EQ(table.PcToDexSize(), pc2dex_entries);
    CHECK_EQ(table.DexToPcSize(), dex2pc_mapping_table_.size() / 2);
    MappingTable::PcToDexIterator it = table.PcToDexBegin();
    for (uint32_t i = 0; i < pc2dex_mapping_table_.size(); ++i, ++it) {
      CHECK_EQ(pc2dex_mapping_table_.at(i), it.NativePcOffset());
      ++i;
      CHECK_EQ(pc2dex_mapping_table_.at(i), it.DexPc());
    }
    MappingTable::DexToPcIterator it2 = table.DexToPcBegin();
    for (uint32_t i = 0; i < dex2pc_mapping_table_.size(); ++i, ++it2) {
      CHECK_EQ(dex2pc_mapping_table_.at(i), it2.NativePcOffset());
      ++i;
      CHECK_EQ(dex2pc_mapping_table_.at(i), it2.DexPc());
    }
  }
}

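// Builds a hash table, keyed by native PC offset and using linear probing for
// collisions, that maps each native PC offset to its reference bitmap.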
class NativePcToReferenceMapBuilder {
 public:
  NativePcToReferenceMapBuilder(std::vector<uint8_t>* table,
                                size_t entries, uint32_t max_native_offset,
                                size_t references_width) : entries_(entries),
                                references_width_(references_width), in_use_(entries),
                                table_(table) {
    // Compute width in bytes needed to hold max_native_offset.
    native_offset_width_ = 0;
    while (max_native_offset != 0) {
      native_offset_width_++;
      max_native_offset >>= 8;
    }
    // Resize table and set up header.
    table->resize((EntryWidth() * entries) + sizeof(uint32_t));
    CHECK_LT(native_offset_width_, 1U << 3);
    (*table)[0] = native_offset_width_ & 7;
    CHECK_LT(references_width_, 1U << 13);
    (*table)[0] |= (references_width_ << 3) & 0xFF;
    (*table)[1] = (references_width_ >> 5) & 0xFF;
    CHECK_LT(entries, 1U << 16);
    (*table)[2] = entries & 0xFF;
    (*table)[3] = (entries >> 8) & 0xFF;
  }

  void AddEntry(uint32_t native_offset, const uint8_t* references) {
    size_t table_index = TableIndex(native_offset);
    while (in_use_[table_index]) {
      table_index = (table_index + 1) % entries_;
    }
    in_use_[table_index] = true;
    SetNativeOffset(table_index, native_offset);
    DCHECK_EQ(native_offset, GetNativeOffset(table_index));
    SetReferences(table_index, references);
  }

 private:
  size_t TableIndex(uint32_t native_offset) {
    return NativePcOffsetToReferenceMap::Hash(native_offset) % entries_;
  }

  uint32_t GetNativeOffset(size_t table_index) {
    uint32_t native_offset = 0;
    size_t table_offset = (table_index * EntryWidth()) + sizeof(uint32_t);
    for (size_t i = 0; i < native_offset_width_; i++) {
      native_offset |= (*table_)[table_offset + i] << (i * 8);
    }
    return native_offset;
  }

  void SetNativeOffset(size_t table_index, uint32_t native_offset) {
    size_t table_offset = (table_index * EntryWidth()) + sizeof(uint32_t);
    for (size_t i = 0; i < native_offset_width_; i++) {
      (*table_)[table_offset + i] = (native_offset >> (i * 8)) & 0xFF;
    }
  }

  void SetReferences(size_t table_index, const uint8_t* references) {
    size_t table_offset = (table_index * EntryWidth()) + sizeof(uint32_t);
    memcpy(&(*table_)[table_offset + native_offset_width_], references, references_width_);
  }

  size_t EntryWidth() const {
    return native_offset_width_ + references_width_;
  }

  // Number of entries in the table.
  const size_t entries_;
  // Number of bytes used to encode the reference bitmap.
  const size_t references_width_;
  // Number of bytes used to encode a native offset.
  size_t native_offset_width_;
  // Entries that are in use.
  std::vector<bool> in_use_;
  // The table we're building.
  std::vector<uint8_t>* const table_;
};

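// Build the native GC map: for every safepoint in the pc2dex mapping table, record
// the verifier's reference bitmap for the corresponding dex PC, keyed by native PC offset.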
void Mir2Lir::CreateNativeGcMap() {
  const std::vector<uint32_t>& mapping_table = pc2dex_mapping_table_;
  uint32_t max_native_offset = 0;
  for (size_t i = 0; i < mapping_table.size(); i += 2) {
    uint32_t native_offset = mapping_table[i + 0];
    if (native_offset > max_native_offset) {
      max_native_offset = native_offset;
    }
  }
  MethodReference method_ref(cu_->dex_file, cu_->method_idx);
  const std::vector<uint8_t>* gc_map_raw = verifier::MethodVerifier::GetDexGcMap(method_ref);
  verifier::DexPcToReferenceMap dex_gc_map(&(*gc_map_raw)[4], gc_map_raw->size() - 4);
  // Compute native offset to references size.
  NativePcToReferenceMapBuilder native_gc_map_builder(&native_gc_map_,
                                                      mapping_table.size() / 2, max_native_offset,
                                                      dex_gc_map.RegWidth());

  for (size_t i = 0; i < mapping_table.size(); i += 2) {
    uint32_t native_offset = mapping_table[i + 0];
    uint32_t dex_pc = mapping_table[i + 1];
    const uint8_t* references = dex_gc_map.FindBitMap(dex_pc, false);
    CHECK(references != NULL) << "Missing ref for dex pc 0x" << std::hex << dex_pc;
    native_gc_map_builder.AddEntry(native_offset, references);
  }
}

/* Determine the offset of each literal field */
int Mir2Lir::AssignLiteralOffset(int offset) {
  offset = AssignLiteralOffsetCommon(literal_list_, offset);
  offset = AssignLiteralOffsetCommon(code_literal_list_, offset);
  offset = AssignLiteralOffsetCommon(method_literal_list_, offset);
  return offset;
}

int Mir2Lir::AssignSwitchTablesOffset(int offset) {
  GrowableArray<SwitchTable*>::Iterator iterator(&switch_tables_);
  while (true) {
    Mir2Lir::SwitchTable *tab_rec = iterator.Next();
    if (tab_rec == NULL) break;
    tab_rec->offset = offset;
    if (tab_rec->table[0] == Instruction::kSparseSwitchSignature) {
      offset += tab_rec->table[1] * (sizeof(int) * 2);
    } else {
      DCHECK_EQ(static_cast<int>(tab_rec->table[0]),
                static_cast<int>(Instruction::kPackedSwitchSignature));
      offset += tab_rec->table[1] * sizeof(int);
    }
  }
  return offset;
}

int Mir2Lir::AssignFillArrayDataOffset(int offset) {
  GrowableArray<FillArrayData*>::Iterator iterator(&fill_array_data_);
  while (true) {
    Mir2Lir::FillArrayData *tab_rec = iterator.Next();
    if (tab_rec == NULL) break;
    tab_rec->offset = offset;
    offset += tab_rec->size;
    // word align
    offset = (offset + 3) & ~3;
  }
  return offset;
}

// LIR offset assignment.
int Mir2Lir::AssignInsnOffsets() {
  LIR* lir;
  int offset = 0;

  for (lir = first_lir_insn_; lir != NULL; lir = NEXT_LIR(lir)) {
    lir->offset = offset;
    if (lir->opcode >= 0) {
      if (!lir->flags.is_nop) {
        offset += lir->flags.size;
      }
    } else if (lir->opcode == kPseudoPseudoAlign4) {
      if (offset & 0x2) {
        offset += 2;
        lir->operands[0] = 1;
      } else {
        lir->operands[0] = 0;
      }
    }
    /* Pseudo opcodes don't consume space */
  }

  return offset;
}

/*
 * Walk the compilation unit and assign offsets to instructions
 * and literals and compute the total size of the compiled unit.
 */
void Mir2Lir::AssignOffsets() {
  int offset = AssignInsnOffsets();

  /* Const values have to be word aligned */
  offset = (offset + 3) & ~3;

  /* Set up offsets for literals */
  data_offset_ = offset;

  offset = AssignLiteralOffset(offset);

  offset = AssignSwitchTablesOffset(offset);

  offset = AssignFillArrayDataOffset(offset);

  total_size_ = offset;
}

/*
 * Go over each instruction in the list and calculate the offset from the top
 * before sending them off to the assembler. If an out-of-range branch distance is
 * seen, rearrange the instructions a bit to correct it.
 */
void Mir2Lir::AssembleLIR() {
  AssignOffsets();
  int assembler_retries = 0;
  /*
   * Assemble here. Note that we generate code with optimistic assumptions
   * and if found not to work, we'll have to redo the sequence and retry.
   */

  while (true) {
    AssemblerStatus res = AssembleInstructions(0);
    if (res == kSuccess) {
      break;
    } else {
      assembler_retries++;
      if (assembler_retries > MAX_ASSEMBLER_RETRIES) {
        CodegenDump();
        LOG(FATAL) << "Assembler error - too many retries";
      }
      // Redo offsets and try again
      AssignOffsets();
      code_buffer_.clear();
    }
  }

  // Install literals
  InstallLiteralPools();

  // Install switch tables
  InstallSwitchTables();

  // Install fill array data
  InstallFillArrayData();

  // Create the mapping table and native offset to reference map.
  CreateMappingTables();

  CreateNativeGcMap();
}

/*
 * Insert a kPseudoCaseLabel at the beginning of the Dalvik
 * offset vaddr. This label will be used to fix up the case
 * branch table during the assembly phase. Be sure to set
 * all resource flags on this to prevent code motion across
 * target boundaries. KeyVal is just there for debugging.
 */
LIR* Mir2Lir::InsertCaseLabel(int vaddr, int keyVal) {
  SafeMap<unsigned int, LIR*>::iterator it;
  it = boundary_map_.find(vaddr);
  if (it == boundary_map_.end()) {
    LOG(FATAL) << "Error: didn't find vaddr 0x" << std::hex << vaddr;
  }
  LIR* new_label = static_cast<LIR*>(arena_->Alloc(sizeof(LIR), ArenaAllocator::kAllocLIR));
  new_label->dalvik_offset = vaddr;
  new_label->opcode = kPseudoCaseLabel;
  new_label->operands[0] = keyVal;
  InsertLIRAfter(it->second, new_label);
  return new_label;
}

void Mir2Lir::MarkPackedCaseLabels(Mir2Lir::SwitchTable *tab_rec) {
  const uint16_t* table = tab_rec->table;
  int base_vaddr = tab_rec->vaddr;
  const int *targets = reinterpret_cast<const int*>(&table[4]);
  int entries = table[1];
  int low_key = s4FromSwitchData(&table[2]);
  for (int i = 0; i < entries; i++) {
    tab_rec->targets[i] = InsertCaseLabel(base_vaddr + targets[i], i + low_key);
  }
}

void Mir2Lir::MarkSparseCaseLabels(Mir2Lir::SwitchTable *tab_rec) {
  const uint16_t* table = tab_rec->table;
  int base_vaddr = tab_rec->vaddr;
  int entries = table[1];
  const int* keys = reinterpret_cast<const int*>(&table[2]);
  const int* targets = &keys[entries];
  for (int i = 0; i < entries; i++) {
    tab_rec->targets[i] = InsertCaseLabel(base_vaddr + targets[i], keys[i]);
  }
}

void Mir2Lir::ProcessSwitchTables() {
  GrowableArray<SwitchTable*>::Iterator iterator(&switch_tables_);
  while (true) {
    Mir2Lir::SwitchTable *tab_rec = iterator.Next();
    if (tab_rec == NULL) break;
    if (tab_rec->table[0] == Instruction::kPackedSwitchSignature) {
      MarkPackedCaseLabels(tab_rec);
    } else if (tab_rec->table[0] == Instruction::kSparseSwitchSignature) {
      MarkSparseCaseLabels(tab_rec);
    } else {
      LOG(FATAL) << "Invalid switch table";
    }
  }
}

void Mir2Lir::DumpSparseSwitchTable(const uint16_t* table) {
  /*
   * Sparse switch data format:
   *  ushort ident = 0x0200   magic value
   *  ushort size             number of entries in the table; > 0
   *  int keys[size]          keys, sorted low-to-high; 32-bit aligned
   *  int targets[size]       branch targets, relative to switch opcode
   *
   * Total size is (2+size*4) 16-bit code units.
   */
  uint16_t ident = table[0];
  int entries = table[1];
  const int* keys = reinterpret_cast<const int*>(&table[2]);
  const int* targets = &keys[entries];
  LOG(INFO) << "Sparse switch table - ident:0x" << std::hex << ident
            << ", entries: " << std::dec << entries;
  for (int i = 0; i < entries; i++) {
    LOG(INFO) << " Key[" << keys[i] << "] -> 0x" << std::hex << targets[i];
  }
}

void Mir2Lir::DumpPackedSwitchTable(const uint16_t* table) {
  /*
   * Packed switch data format:
   *  ushort ident = 0x0100   magic value
   *  ushort size             number of entries in the table
   *  int first_key           first (and lowest) switch case value
   *  int targets[size]       branch targets, relative to switch opcode
   *
   * Total size is (4+size*2) 16-bit code units.
   */
  uint16_t ident = table[0];
  const int* targets = reinterpret_cast<const int*>(&table[4]);
  int entries = table[1];
  int low_key = s4FromSwitchData(&table[2]);
  LOG(INFO) << "Packed switch table - ident:0x" << std::hex << ident
            << ", entries: " << std::dec << entries << ", low_key: " << low_key;
  for (int i = 0; i < entries; i++) {
    LOG(INFO) << " Key[" << (i + low_key) << "] -> 0x" << std::hex
              << targets[i];
  }
}

/*
 * Set up special LIR to mark a Dalvik byte-code instruction start and
 * record it in the boundary_map. NOTE: in cases such as kMirOpCheck in
 * which we split a single Dalvik instruction, only the first MIR op
 * associated with a Dalvik PC should be entered into the map.
 */
LIR* Mir2Lir::MarkBoundary(int offset, const char* inst_str) {
  LIR* res = NewLIR1(kPseudoDalvikByteCodeBoundary, reinterpret_cast<uintptr_t>(inst_str));
  if (boundary_map_.find(offset) == boundary_map_.end()) {
    boundary_map_.Put(offset, res);
  }
  return res;
}

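// Statically evaluate a compare-and-branch with constant operands and report
// whether the branch would be taken.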
bool Mir2Lir::EvaluateBranch(Instruction::Code opcode, int32_t src1, int32_t src2) {
  bool is_taken;
  switch (opcode) {
    case Instruction::IF_EQ: is_taken = (src1 == src2); break;
    case Instruction::IF_NE: is_taken = (src1 != src2); break;
    case Instruction::IF_LT: is_taken = (src1 < src2); break;
    case Instruction::IF_GE: is_taken = (src1 >= src2); break;
    case Instruction::IF_GT: is_taken = (src1 > src2); break;
    case Instruction::IF_LE: is_taken = (src1 <= src2); break;
    case Instruction::IF_EQZ: is_taken = (src1 == 0); break;
    case Instruction::IF_NEZ: is_taken = (src1 != 0); break;
    case Instruction::IF_LTZ: is_taken = (src1 < 0); break;
    case Instruction::IF_GEZ: is_taken = (src1 >= 0); break;
    case Instruction::IF_GTZ: is_taken = (src1 > 0); break;
    case Instruction::IF_LEZ: is_taken = (src1 <= 0); break;
    default:
      LOG(FATAL) << "Unexpected opcode " << opcode;
      is_taken = false;
  }
  return is_taken;
}

// Convert relation of src1/src2 to src2/src1
ConditionCode Mir2Lir::FlipComparisonOrder(ConditionCode before) {
  ConditionCode res;
  switch (before) {
    case kCondEq: res = kCondEq; break;
    case kCondNe: res = kCondNe; break;
    case kCondLt: res = kCondGt; break;
    case kCondGt: res = kCondLt; break;
    case kCondLe: res = kCondGe; break;
    case kCondGe: res = kCondLe; break;
    default:
      res = static_cast<ConditionCode>(0);
      LOG(FATAL) << "Unexpected ccode " << before;
  }
  return res;
}

// TODO: move to mir_to_lir.cc
Mir2Lir::Mir2Lir(CompilationUnit* cu, MIRGraph* mir_graph, ArenaAllocator* arena)
    : Backend(arena),
      literal_list_(NULL),
      method_literal_list_(NULL),
      code_literal_list_(NULL),
      cu_(cu),
      mir_graph_(mir_graph),
      switch_tables_(arena, 4, kGrowableArraySwitchTables),
      fill_array_data_(arena, 4, kGrowableArrayFillArrayData),
      throw_launchpads_(arena, 2048, kGrowableArrayThrowLaunchPads),
      suspend_launchpads_(arena, 4, kGrowableArraySuspendLaunchPads),
      intrinsic_launchpads_(arena, 2048, kGrowableArrayMisc),
      data_offset_(0),
      total_size_(0),
      block_label_list_(NULL),
      current_dalvik_offset_(0),
      reg_pool_(NULL),
      live_sreg_(0),
      num_core_spills_(0),
      num_fp_spills_(0),
      frame_size_(0),
      core_spill_mask_(0),
      fp_spill_mask_(0),
      first_lir_insn_(NULL),
      last_lir_insn_(NULL) {
  promotion_map_ = static_cast<PromotionMap*>
      (arena_->Alloc((cu_->num_dalvik_registers + cu_->num_compiler_temps + 1) *
                     sizeof(promotion_map_[0]), ArenaAllocator::kAllocRegAlloc));
}

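// Top-level codegen driver: performs register allocation, lowers MIR to LIR,
// and assembles the LIR into machine code.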
void Mir2Lir::Materialize() {
  CompilerInitializeRegAlloc();  // Needs to happen after SSA naming

  /* Allocate Registers using simple local allocation scheme */
  SimpleRegAlloc();

  if (mir_graph_->IsSpecialCase()) {
    /*
     * Custom codegen for special cases. If for any reason the
     * special codegen doesn't succeed, first_lir_insn_ will be
     * set to NULL.
     */
    SpecialMIR2LIR(mir_graph_->GetSpecialCase());
  }

  /* Convert MIR to LIR, etc. */
  if (first_lir_insn_ == NULL) {
    MethodMIR2LIR();
  }

  /* Method is not empty */
  if (first_lir_insn_) {
    // mark the targets of switch statement case labels
    ProcessSwitchTables();

    /* Convert LIR into machine code. */
    AssembleLIR();

    if (cu_->verbose) {
      CodegenDump();
    }
  }
}

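// Package the assembled code, frame/spill information, vmap table, mapping table
// and native GC map into a CompiledMethod.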
CompiledMethod* Mir2Lir::GetCompiledMethod() {
  // Combine vmap tables - core regs, then fp regs - into vmap_table
  std::vector<uint16_t> raw_vmap_table;
  // Core regs may have been inserted out of order - sort first
  std::sort(core_vmap_table_.begin(), core_vmap_table_.end());
  for (size_t i = 0; i < core_vmap_table_.size(); i++) {
    // Copy, stripping out the phys register sort key
    raw_vmap_table.push_back(~(-1 << VREG_NUM_WIDTH) & core_vmap_table_[i]);
  }
  // If we have a frame, push a marker to take place of lr
  if (frame_size_ > 0) {
    raw_vmap_table.push_back(INVALID_VREG);
  } else {
    DCHECK_EQ(__builtin_popcount(core_spill_mask_), 0);
    DCHECK_EQ(__builtin_popcount(fp_spill_mask_), 0);
  }
  // Combine vmap tables - core regs, then fp regs. fp regs already sorted
  for (uint32_t i = 0; i < fp_vmap_table_.size(); i++) {
    raw_vmap_table.push_back(fp_vmap_table_[i]);
  }
  UnsignedLeb128EncodingVector vmap_encoder;
  // Prefix the encoded data with its size.
  vmap_encoder.PushBack(raw_vmap_table.size());
  typedef std::vector<uint16_t>::const_iterator It;
  for (It cur = raw_vmap_table.begin(), end = raw_vmap_table.end(); cur != end; ++cur) {
    vmap_encoder.PushBack(*cur);
  }
  CompiledMethod* result =
      new CompiledMethod(cu_->instruction_set, code_buffer_,
                         frame_size_, core_spill_mask_, fp_spill_mask_,
                         encoded_mapping_table_.GetData(), vmap_encoder.GetData(), native_gc_map_);
  return result;
}

int Mir2Lir::ComputeFrameSize() {
  /* Figure out the frame size */
  static const uint32_t kAlignMask = kStackAlignment - 1;
  uint32_t size = (num_core_spills_ + num_fp_spills_ +
                   1 /* filler word */ + cu_->num_regs + cu_->num_outs +
                   cu_->num_compiler_temps + 1 /* cur_method* */)
                   * sizeof(uint32_t);
  /* Align and set */
  return (size + kAlignMask) & ~(kAlignMask);
}

/*
 * Append an LIR instruction to the LIR list maintained by a compilation
 * unit
 */
void Mir2Lir::AppendLIR(LIR* lir) {
  if (first_lir_insn_ == NULL) {
    DCHECK(last_lir_insn_ == NULL);
    last_lir_insn_ = first_lir_insn_ = lir;
    lir->prev = lir->next = NULL;
  } else {
    last_lir_insn_->next = lir;
    lir->prev = last_lir_insn_;
    lir->next = NULL;
    last_lir_insn_ = lir;
  }
}

/*
 * Insert an LIR instruction before the current instruction, which cannot be the
 * first instruction.
 *
 * prev_lir <-> new_lir <-> current_lir
 */
void Mir2Lir::InsertLIRBefore(LIR* current_lir, LIR* new_lir) {
  DCHECK(current_lir->prev != NULL);
  LIR *prev_lir = current_lir->prev;

  prev_lir->next = new_lir;
  new_lir->prev = prev_lir;
  new_lir->next = current_lir;
  current_lir->prev = new_lir;
}

/*
 * Insert an LIR instruction after the current instruction, which cannot be the
 * last instruction.
 *
 * current_lir -> new_lir -> old_next
 */
void Mir2Lir::InsertLIRAfter(LIR* current_lir, LIR* new_lir) {
  new_lir->prev = current_lir;
  new_lir->next = current_lir->next;
  current_lir->next = new_lir;
  new_lir->next->prev = new_lir;
}

}  // namespace art