/*
 * Copyright (C) 2011 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "dex/compiler_internals.h"
#include "dex_file-inl.h"
#include "gc_map.h"
#include "gc_map_builder.h"
#include "mapping_table.h"
#include "mir_to_lir-inl.h"
#include "dex/quick/dex_file_method_inliner.h"
#include "dex/quick/dex_file_to_method_inliner_map.h"
#include "dex/verification_results.h"
#include "dex/verified_method.h"
#include "verifier/dex_gc_map.h"
#include "verifier/method_verifier.h"
#include "vmap_table.h"

namespace art {

namespace {

/* Dump a mapping table */
template <typename It>
void DumpMappingTable(const char* table_name, const char* descriptor, const char* name,
                      const Signature& signature, uint32_t size, It first) {
  if (size != 0) {
    std::string line(StringPrintf("\n %s %s%s_%s_table[%u] = {", table_name,
                                  descriptor, name, signature.ToString().c_str(), size));
    std::replace(line.begin(), line.end(), ';', '_');
    LOG(INFO) << line;
    for (uint32_t i = 0; i != size; ++i) {
      line = StringPrintf(" {0x%05x, 0x%04x},", first.NativePcOffset(), first.DexPc());
      ++first;
      LOG(INFO) << line;
    }
    LOG(INFO) << " };\n\n";
  }
}

}  // anonymous namespace

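// Returns true if the constant bound to rl_src can be materialized cheaply by the
// target backend (via the InexpensiveConstant* hooks) instead of being loaded from
// the literal pool.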
bool Mir2Lir::IsInexpensiveConstant(RegLocation rl_src) {
  bool res = false;
  if (rl_src.is_const) {
    if (rl_src.wide) {
      if (rl_src.fp) {
        res = InexpensiveConstantDouble(mir_graph_->ConstantValueWide(rl_src));
      } else {
        res = InexpensiveConstantLong(mir_graph_->ConstantValueWide(rl_src));
      }
    } else {
      if (rl_src.fp) {
        res = InexpensiveConstantFloat(mir_graph_->ConstantValue(rl_src));
      } else {
        res = InexpensiveConstantInt(mir_graph_->ConstantValue(rl_src));
      }
    }
  }
  return res;
}

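// Mark this instruction as defining all resources and append a kPseudoSafepointPC
// marker after it; the mapping-table and native GC map builders below record a
// safepoint entry at that native PC.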
void Mir2Lir::MarkSafepointPC(LIR* inst) {
  DCHECK(!inst->flags.use_def_invalid);
  inst->u.m.def_mask = ENCODE_ALL;
  LIR* safepoint_pc = NewLIR0(kPseudoSafepointPC);
  DCHECK_EQ(safepoint_pc->u.m.def_mask, ENCODE_ALL);
}

/* Remove a LIR from the list. */
void Mir2Lir::UnlinkLIR(LIR* lir) {
  if (UNLIKELY(lir == first_lir_insn_)) {
    first_lir_insn_ = lir->next;
    if (lir->next != NULL) {
      lir->next->prev = NULL;
    } else {
      DCHECK(lir->next == NULL);
      DCHECK(lir == last_lir_insn_);
      last_lir_insn_ = NULL;
    }
  } else if (lir == last_lir_insn_) {
    last_lir_insn_ = lir->prev;
    lir->prev->next = NULL;
  } else if ((lir->prev != NULL) && (lir->next != NULL)) {
    lir->prev->next = lir->next;
    lir->next->prev = lir->prev;
  }
}

/* Convert an instruction to a NOP */
void Mir2Lir::NopLIR(LIR* lir) {
  lir->flags.is_nop = true;
  if (!cu_->verbose) {
    UnlinkLIR(lir);
  }
}

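// Rewrite the memory-related bits of the instruction's use (load) or def (store)
// resource mask to reflect the kind of memory it touches: literal pool, Dalvik
// register, heap reference, or a must-not-alias load.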
void Mir2Lir::SetMemRefType(LIR* lir, bool is_load, int mem_type) {
  uint64_t *mask_ptr;
  uint64_t mask = ENCODE_MEM;
  DCHECK(GetTargetInstFlags(lir->opcode) & (IS_LOAD | IS_STORE));
  DCHECK(!lir->flags.use_def_invalid);
  if (is_load) {
    mask_ptr = &lir->u.m.use_mask;
  } else {
    mask_ptr = &lir->u.m.def_mask;
  }
  /* Clear out the memref flags */
  *mask_ptr &= ~mask;
  /* ..and then add back the one we need */
  switch (mem_type) {
    case kLiteral:
      DCHECK(is_load);
      *mask_ptr |= ENCODE_LITERAL;
      break;
    case kDalvikReg:
      *mask_ptr |= ENCODE_DALVIK_REG;
      break;
    case kHeapRef:
      *mask_ptr |= ENCODE_HEAP_REF;
      break;
    case kMustNotAlias:
      /* Currently only loads can be marked as kMustNotAlias */
      DCHECK(!(GetTargetInstFlags(lir->opcode) & IS_STORE));
      *mask_ptr |= ENCODE_MUST_NOT_ALIAS;
      break;
    default:
      LOG(FATAL) << "Oat: invalid memref kind - " << mem_type;
  }
}

/*
 * Mark load/store instructions that access Dalvik registers through the stack.
 */
void Mir2Lir::AnnotateDalvikRegAccess(LIR* lir, int reg_id, bool is_load,
                                      bool is64bit) {
  SetMemRefType(lir, is_load, kDalvikReg);

  /*
   * Store the Dalvik register id in alias_info. Mark the MSB if it is a 64-bit
   * access.
   */
  lir->flags.alias_info = ENCODE_ALIAS_INFO(reg_id, is64bit);
}

/*
 * Debugging macros
 */
#define DUMP_RESOURCE_MASK(X)

/* Pretty-print a LIR instruction */
void Mir2Lir::DumpLIRInsn(LIR* lir, unsigned char* base_addr) {
  int offset = lir->offset;
  int dest = lir->operands[0];
  const bool dump_nop = (cu_->enable_debug & (1 << kDebugShowNops));

  /* Handle pseudo-ops individually, and all regular insns as a group */
  switch (lir->opcode) {
    case kPseudoMethodEntry:
      LOG(INFO) << "-------- method entry "
                << PrettyMethod(cu_->method_idx, *cu_->dex_file);
      break;
    case kPseudoMethodExit:
      LOG(INFO) << "-------- Method_Exit";
      break;
    case kPseudoBarrier:
      LOG(INFO) << "-------- BARRIER";
      break;
    case kPseudoEntryBlock:
      LOG(INFO) << "-------- entry offset: 0x" << std::hex << dest;
      break;
    case kPseudoDalvikByteCodeBoundary:
      if (lir->operands[0] == 0) {
        // NOTE: only used for debug listings.
        lir->operands[0] = WrapPointer(ArenaStrdup("No instruction string"));
      }
      LOG(INFO) << "-------- dalvik offset: 0x" << std::hex
                << lir->dalvik_offset << " @ "
                << reinterpret_cast<char*>(UnwrapPointer(lir->operands[0]));
      break;
    case kPseudoExitBlock:
      LOG(INFO) << "-------- exit offset: 0x" << std::hex << dest;
      break;
    case kPseudoPseudoAlign4:
      LOG(INFO) << reinterpret_cast<uintptr_t>(base_addr) + offset << " (0x" << std::hex
                << offset << "): .align4";
      break;
    case kPseudoEHBlockLabel:
      LOG(INFO) << "Exception_Handling:";
      break;
    case kPseudoTargetLabel:
    case kPseudoNormalBlockLabel:
      LOG(INFO) << "L" << reinterpret_cast<void*>(lir) << ":";
      break;
    case kPseudoThrowTarget:
      LOG(INFO) << "LT" << reinterpret_cast<void*>(lir) << ":";
      break;
    case kPseudoIntrinsicRetry:
      LOG(INFO) << "IR" << reinterpret_cast<void*>(lir) << ":";
      break;
    case kPseudoSuspendTarget:
      LOG(INFO) << "LS" << reinterpret_cast<void*>(lir) << ":";
      break;
    case kPseudoSafepointPC:
      LOG(INFO) << "LsafepointPC_0x" << std::hex << lir->offset << "_" << lir->dalvik_offset << ":";
      break;
    case kPseudoExportedPC:
      LOG(INFO) << "LexportedPC_0x" << std::hex << lir->offset << "_" << lir->dalvik_offset << ":";
      break;
    case kPseudoCaseLabel:
      LOG(INFO) << "LC" << reinterpret_cast<void*>(lir) << ": Case target 0x"
                << std::hex << lir->operands[0] << "|" << std::dec << lir->operands[0];
      break;
    default:
      if (lir->flags.is_nop && !dump_nop) {
        break;
      } else {
        std::string op_name(BuildInsnString(GetTargetInstName(lir->opcode),
                                            lir, base_addr));
        std::string op_operands(BuildInsnString(GetTargetInstFmt(lir->opcode),
                                                lir, base_addr));
        LOG(INFO) << StringPrintf("%5p: %-9s%s%s",
                                  base_addr + offset,
                                  op_name.c_str(), op_operands.c_str(),
                                  lir->flags.is_nop ? "(nop)" : "");
      }
      break;
  }

  if (lir->u.m.use_mask && (!lir->flags.is_nop || dump_nop)) {
    DUMP_RESOURCE_MASK(DumpResourceMask(lir, lir->u.m.use_mask, "use"));
  }
  if (lir->u.m.def_mask && (!lir->flags.is_nop || dump_nop)) {
    DUMP_RESOURCE_MASK(DumpResourceMask(lir, lir->u.m.def_mask, "def"));
  }
}

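// Log how each Dalvik virtual register and compiler temp was promoted: the FP
// register (if any), and either the core physical register or its stack spill
// location.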
void Mir2Lir::DumpPromotionMap() {
  int num_regs = cu_->num_dalvik_registers + mir_graph_->GetNumUsedCompilerTemps();
  for (int i = 0; i < num_regs; i++) {
    PromotionMap v_reg_map = promotion_map_[i];
    std::string buf;
    if (v_reg_map.fp_location == kLocPhysReg) {
      StringAppendF(&buf, " : s%d", RegStorage::RegNum(v_reg_map.FpReg));
    }

    std::string buf3;
    if (i < cu_->num_dalvik_registers) {
      StringAppendF(&buf3, "%02d", i);
    } else if (i == mir_graph_->GetMethodSReg()) {
      buf3 = "Method*";
    } else {
      StringAppendF(&buf3, "ct%d", i - cu_->num_dalvik_registers);
    }

    LOG(INFO) << StringPrintf("V[%s] -> %s%d%s", buf3.c_str(),
                              v_reg_map.core_location == kLocPhysReg ?
                              "r" : "SP+", v_reg_map.core_location == kLocPhysReg ?
                              v_reg_map.core_reg : SRegOffset(i),
                              buf.c_str());
  }
}

void Mir2Lir::UpdateLIROffsets() {
  // Only used for code listings.
  size_t offset = 0;
  for (LIR* lir = first_lir_insn_; lir != nullptr; lir = lir->next) {
    lir->offset = offset;
    if (!lir->flags.is_nop && !IsPseudoLirOp(lir->opcode)) {
      offset += GetInsnSize(lir);
    } else if (lir->opcode == kPseudoPseudoAlign4) {
      offset += (offset & 0x2);
    }
  }
}

/* Dump instructions and constant pool contents */
void Mir2Lir::CodegenDump() {
  LOG(INFO) << "Dumping LIR insns for "
            << PrettyMethod(cu_->method_idx, *cu_->dex_file);
  LIR* lir_insn;
  int insns_size = cu_->code_item->insns_size_in_code_units_;

  LOG(INFO) << "Regs (excluding ins) : " << cu_->num_regs;
  LOG(INFO) << "Ins : " << cu_->num_ins;
  LOG(INFO) << "Outs : " << cu_->num_outs;
  LOG(INFO) << "CoreSpills : " << num_core_spills_;
  LOG(INFO) << "FPSpills : " << num_fp_spills_;
  LOG(INFO) << "CompilerTemps : " << mir_graph_->GetNumUsedCompilerTemps();
  LOG(INFO) << "Frame size : " << frame_size_;
  LOG(INFO) << "code size is " << total_size_ <<
      " bytes, Dalvik size is " << insns_size * 2;
  LOG(INFO) << "expansion factor: "
            << static_cast<float>(total_size_) / static_cast<float>(insns_size * 2);
  DumpPromotionMap();
  UpdateLIROffsets();
  for (lir_insn = first_lir_insn_; lir_insn != NULL; lir_insn = lir_insn->next) {
    DumpLIRInsn(lir_insn, 0);
  }
  for (lir_insn = literal_list_; lir_insn != NULL; lir_insn = lir_insn->next) {
    LOG(INFO) << StringPrintf("%x (%04x): .word (%#x)", lir_insn->offset, lir_insn->offset,
                              lir_insn->operands[0]);
  }

  const DexFile::MethodId& method_id =
      cu_->dex_file->GetMethodId(cu_->method_idx);
  const Signature signature = cu_->dex_file->GetMethodSignature(method_id);
  const char* name = cu_->dex_file->GetMethodName(method_id);
  const char* descriptor(cu_->dex_file->GetMethodDeclaringClassDescriptor(method_id));

  // Dump mapping tables
  if (!encoded_mapping_table_.empty()) {
    MappingTable table(&encoded_mapping_table_[0]);
    DumpMappingTable("PC2Dex_MappingTable", descriptor, name, signature,
                     table.PcToDexSize(), table.PcToDexBegin());
    DumpMappingTable("Dex2PC_MappingTable", descriptor, name, signature,
                     table.DexToPcSize(), table.DexToPcBegin());
  }
}

/*
 * Search the existing constants in the literal pool for an exact or close match
 * within specified delta (greater or equal to 0).
 */
LIR* Mir2Lir::ScanLiteralPool(LIR* data_target, int value, unsigned int delta) {
  while (data_target) {
    if ((static_cast<unsigned>(value - data_target->operands[0])) <= delta)
      return data_target;
    data_target = data_target->next;
  }
  return NULL;
}

/* Search the existing constants in the literal pool for an exact wide match */
LIR* Mir2Lir::ScanLiteralPoolWide(LIR* data_target, int val_lo, int val_hi) {
  bool lo_match = false;
  LIR* lo_target = NULL;
  while (data_target) {
    if (lo_match && (data_target->operands[0] == val_hi)) {
      // Record high word in case we need to expand this later.
      lo_target->operands[1] = val_hi;
      return lo_target;
    }
    lo_match = false;
    if (data_target->operands[0] == val_lo) {
      lo_match = true;
      lo_target = data_target;
    }
    data_target = data_target->next;
  }
  return NULL;
}

/*
 * The following are building blocks to insert constants into the pool or
 * instruction streams.
 */

/* Add a 32-bit constant to the constant pool */
LIR* Mir2Lir::AddWordData(LIR* *constant_list_p, int value) {
  /* Add the constant to the literal pool */
  if (constant_list_p) {
    LIR* new_value = static_cast<LIR*>(arena_->Alloc(sizeof(LIR), kArenaAllocData));
    new_value->operands[0] = value;
    new_value->next = *constant_list_p;
    *constant_list_p = new_value;
    estimated_native_code_size_ += sizeof(value);
    return new_value;
  }
  return NULL;
}

/* Add a 64-bit constant to the constant pool or mixed with code */
LIR* Mir2Lir::AddWideData(LIR* *constant_list_p, int val_lo, int val_hi) {
  AddWordData(constant_list_p, val_hi);
  return AddWordData(constant_list_p, val_lo);
}

static void Push32(std::vector<uint8_t>&buf, int data) {
  buf.push_back(data & 0xff);
  buf.push_back((data >> 8) & 0xff);
  buf.push_back((data >> 16) & 0xff);
  buf.push_back((data >> 24) & 0xff);
}

// Push 8 bytes on 64-bit target systems; 4 on 32-bit target systems.
static void PushPointer(std::vector<uint8_t>&buf, const void* pointer, bool target64) {
  uint64_t data = reinterpret_cast<uintptr_t>(pointer);
  if (target64) {
    Push32(buf, data & 0xFFFFFFFF);
    Push32(buf, (data >> 32) & 0xFFFFFFFF);
  } else {
    Push32(buf, static_cast<uint32_t>(data));
  }
}

static void AlignBuffer(std::vector<uint8_t>&buf, size_t offset) {
  while (buf.size() < offset) {
    buf.push_back(0);
  }
}

/* Write the literal pool to the output stream */
void Mir2Lir::InstallLiteralPools() {
  AlignBuffer(code_buffer_, data_offset_);
  LIR* data_lir = literal_list_;
  while (data_lir != NULL) {
    Push32(code_buffer_, data_lir->operands[0]);
    data_lir = NEXT_LIR(data_lir);
  }
  // Push code and method literals, record offsets for the compiler to patch.
  data_lir = code_literal_list_;
  while (data_lir != NULL) {
    uint32_t target_method_idx = data_lir->operands[0];
    const DexFile* target_dex_file =
        reinterpret_cast<const DexFile*>(UnwrapPointer(data_lir->operands[1]));
    cu_->compiler_driver->AddCodePatch(cu_->dex_file,
                                       cu_->class_def_idx,
                                       cu_->method_idx,
                                       cu_->invoke_type,
                                       target_method_idx,
                                       target_dex_file,
                                       static_cast<InvokeType>(data_lir->operands[2]),
                                       code_buffer_.size());
    const DexFile::MethodId& target_method_id = target_dex_file->GetMethodId(target_method_idx);
    // unique value based on target to ensure code deduplication works
    PushPointer(code_buffer_, &target_method_id, cu_->target64);
    data_lir = NEXT_LIR(data_lir);
  }
  data_lir = method_literal_list_;
  while (data_lir != NULL) {
    uint32_t target_method_idx = data_lir->operands[0];
    const DexFile* target_dex_file =
        reinterpret_cast<const DexFile*>(UnwrapPointer(data_lir->operands[1]));
    cu_->compiler_driver->AddMethodPatch(cu_->dex_file,
                                         cu_->class_def_idx,
                                         cu_->method_idx,
                                         cu_->invoke_type,
                                         target_method_idx,
                                         target_dex_file,
                                         static_cast<InvokeType>(data_lir->operands[2]),
                                         code_buffer_.size());
    const DexFile::MethodId& target_method_id = target_dex_file->GetMethodId(target_method_idx);
    // unique value based on target to ensure code deduplication works
    PushPointer(code_buffer_, &target_method_id, cu_->target64);
    data_lir = NEXT_LIR(data_lir);
  }
  // Push class literals.
  data_lir = class_literal_list_;
  while (data_lir != NULL) {
    uint32_t target_method_idx = data_lir->operands[0];
    cu_->compiler_driver->AddClassPatch(cu_->dex_file,
                                        cu_->class_def_idx,
                                        cu_->method_idx,
                                        target_method_idx,
                                        code_buffer_.size());
    const DexFile::TypeId& target_method_id = cu_->dex_file->GetTypeId(target_method_idx);
    // unique value based on target to ensure code deduplication works
    PushPointer(code_buffer_, &target_method_id, cu_->target64);
    data_lir = NEXT_LIR(data_lir);
  }
}

/* Write the switch tables to the output stream */
void Mir2Lir::InstallSwitchTables() {
  GrowableArray<SwitchTable*>::Iterator iterator(&switch_tables_);
  while (true) {
    Mir2Lir::SwitchTable* tab_rec = iterator.Next();
    if (tab_rec == NULL) break;
    AlignBuffer(code_buffer_, tab_rec->offset);
    /*
     * For Arm, our reference point is the address of the bx
     * instruction that does the launch, so we have to subtract
     * the auto pc-advance. For other targets the reference point
     * is a label, so we can use the offset as-is.
     */
    int bx_offset = INVALID_OFFSET;
    switch (cu_->instruction_set) {
      case kThumb2:
        DCHECK(tab_rec->anchor->flags.fixup != kFixupNone);
        bx_offset = tab_rec->anchor->offset + 4;
        break;
      case kX86:
      case kX86_64:
        bx_offset = 0;
        break;
      case kMips:
        bx_offset = tab_rec->anchor->offset;
        break;
      default: LOG(FATAL) << "Unexpected instruction set: " << cu_->instruction_set;
    }
    if (cu_->verbose) {
      LOG(INFO) << "Switch table for offset 0x" << std::hex << bx_offset;
    }
    if (tab_rec->table[0] == Instruction::kSparseSwitchSignature) {
      const int32_t* keys = reinterpret_cast<const int32_t*>(&(tab_rec->table[2]));
      for (int elems = 0; elems < tab_rec->table[1]; elems++) {
        int disp = tab_rec->targets[elems]->offset - bx_offset;
        if (cu_->verbose) {
          LOG(INFO) << " Case[" << elems << "] key: 0x"
                    << std::hex << keys[elems] << ", disp: 0x"
                    << std::hex << disp;
        }
        Push32(code_buffer_, keys[elems]);
        Push32(code_buffer_,
               tab_rec->targets[elems]->offset - bx_offset);
      }
    } else {
      DCHECK_EQ(static_cast<int>(tab_rec->table[0]),
                static_cast<int>(Instruction::kPackedSwitchSignature));
      for (int elems = 0; elems < tab_rec->table[1]; elems++) {
        int disp = tab_rec->targets[elems]->offset - bx_offset;
        if (cu_->verbose) {
          LOG(INFO) << " Case[" << elems << "] disp: 0x"
                    << std::hex << disp;
        }
        Push32(code_buffer_, tab_rec->targets[elems]->offset - bx_offset);
      }
    }
  }
}

/* Write the fill array data to the output stream */
void Mir2Lir::InstallFillArrayData() {
  GrowableArray<FillArrayData*>::Iterator iterator(&fill_array_data_);
  while (true) {
    Mir2Lir::FillArrayData *tab_rec = iterator.Next();
    if (tab_rec == NULL) break;
    AlignBuffer(code_buffer_, tab_rec->offset);
    for (int i = 0; i < (tab_rec->size + 1) / 2; i++) {
      code_buffer_.push_back(tab_rec->table[i] & 0xFF);
      code_buffer_.push_back((tab_rec->table[i] >> 8) & 0xFF);
    }
  }
}

static int AssignLiteralOffsetCommon(LIR* lir, CodeOffset offset) {
  for (; lir != NULL; lir = lir->next) {
    lir->offset = offset;
    offset += 4;
  }
  return offset;
}

static int AssignLiteralPointerOffsetCommon(LIR* lir, CodeOffset offset,
                                            unsigned int element_size) {
  // Align to natural pointer size.
  offset = RoundUp(offset, element_size);
  for (; lir != NULL; lir = lir->next) {
    lir->offset = offset;
    offset += element_size;
  }
  return offset;
}

// Make sure we have a code address for every declared catch entry
bool Mir2Lir::VerifyCatchEntries() {
  MappingTable table(&encoded_mapping_table_[0]);
  std::vector<uint32_t> dex_pcs;
  dex_pcs.reserve(table.DexToPcSize());
  for (auto it = table.DexToPcBegin(), end = table.DexToPcEnd(); it != end; ++it) {
    dex_pcs.push_back(it.DexPc());
  }
  // Sort dex_pcs, so that we can quickly check it against the ordered mir_graph_->catches_.
  std::sort(dex_pcs.begin(), dex_pcs.end());

  bool success = true;
  auto it = dex_pcs.begin(), end = dex_pcs.end();
  for (uint32_t dex_pc : mir_graph_->catches_) {
    while (it != end && *it < dex_pc) {
      LOG(INFO) << "Unexpected catch entry @ dex pc 0x" << std::hex << *it;
      ++it;
      success = false;
    }
    if (it == end || *it > dex_pc) {
      LOG(INFO) << "Missing native PC for catch entry @ 0x" << std::hex << dex_pc;
      success = false;
    } else {
      ++it;
    }
  }
  if (!success) {
    LOG(INFO) << "Bad dex2pcMapping table in " << PrettyMethod(cu_->method_idx, *cu_->dex_file);
    LOG(INFO) << "Entries @ decode: " << mir_graph_->catches_.size() << ", Entries in table: "
              << table.DexToPcSize();
  }
  return success;
}

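// Build the combined PC-to-dex and dex-to-PC mapping table. The first pass over the
// LIR sizes the ULEB128/SLEB128 delta-encoded entries, the second pass writes them
// into encoded_mapping_table_: header, then pc2dex entries, then dex2pc entries.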
void Mir2Lir::CreateMappingTables() {
  uint32_t pc2dex_data_size = 0u;
  uint32_t pc2dex_entries = 0u;
  uint32_t pc2dex_offset = 0u;
  uint32_t pc2dex_dalvik_offset = 0u;
  uint32_t dex2pc_data_size = 0u;
  uint32_t dex2pc_entries = 0u;
  uint32_t dex2pc_offset = 0u;
  uint32_t dex2pc_dalvik_offset = 0u;
  for (LIR* tgt_lir = first_lir_insn_; tgt_lir != NULL; tgt_lir = NEXT_LIR(tgt_lir)) {
    if (!tgt_lir->flags.is_nop && (tgt_lir->opcode == kPseudoSafepointPC)) {
      pc2dex_entries += 1;
      DCHECK(pc2dex_offset <= tgt_lir->offset);
      pc2dex_data_size += UnsignedLeb128Size(tgt_lir->offset - pc2dex_offset);
      pc2dex_data_size += SignedLeb128Size(static_cast<int32_t>(tgt_lir->dalvik_offset) -
                                           static_cast<int32_t>(pc2dex_dalvik_offset));
      pc2dex_offset = tgt_lir->offset;
      pc2dex_dalvik_offset = tgt_lir->dalvik_offset;
    }
    if (!tgt_lir->flags.is_nop && (tgt_lir->opcode == kPseudoExportedPC)) {
      dex2pc_entries += 1;
      DCHECK(dex2pc_offset <= tgt_lir->offset);
      dex2pc_data_size += UnsignedLeb128Size(tgt_lir->offset - dex2pc_offset);
      dex2pc_data_size += SignedLeb128Size(static_cast<int32_t>(tgt_lir->dalvik_offset) -
                                           static_cast<int32_t>(dex2pc_dalvik_offset));
      dex2pc_offset = tgt_lir->offset;
      dex2pc_dalvik_offset = tgt_lir->dalvik_offset;
    }
  }

  uint32_t total_entries = pc2dex_entries + dex2pc_entries;
  uint32_t hdr_data_size = UnsignedLeb128Size(total_entries) + UnsignedLeb128Size(pc2dex_entries);
  uint32_t data_size = hdr_data_size + pc2dex_data_size + dex2pc_data_size;
  encoded_mapping_table_.resize(data_size);
  uint8_t* write_pos = &encoded_mapping_table_[0];
  write_pos = EncodeUnsignedLeb128(write_pos, total_entries);
  write_pos = EncodeUnsignedLeb128(write_pos, pc2dex_entries);
  DCHECK_EQ(static_cast<size_t>(write_pos - &encoded_mapping_table_[0]), hdr_data_size);
  uint8_t* write_pos2 = write_pos + pc2dex_data_size;

  pc2dex_offset = 0u;
  pc2dex_dalvik_offset = 0u;
  dex2pc_offset = 0u;
  dex2pc_dalvik_offset = 0u;
  for (LIR* tgt_lir = first_lir_insn_; tgt_lir != NULL; tgt_lir = NEXT_LIR(tgt_lir)) {
    if (!tgt_lir->flags.is_nop && (tgt_lir->opcode == kPseudoSafepointPC)) {
      DCHECK(pc2dex_offset <= tgt_lir->offset);
      write_pos = EncodeUnsignedLeb128(write_pos, tgt_lir->offset - pc2dex_offset);
      write_pos = EncodeSignedLeb128(write_pos, static_cast<int32_t>(tgt_lir->dalvik_offset) -
                                     static_cast<int32_t>(pc2dex_dalvik_offset));
      pc2dex_offset = tgt_lir->offset;
      pc2dex_dalvik_offset = tgt_lir->dalvik_offset;
    }
    if (!tgt_lir->flags.is_nop && (tgt_lir->opcode == kPseudoExportedPC)) {
      DCHECK(dex2pc_offset <= tgt_lir->offset);
      write_pos2 = EncodeUnsignedLeb128(write_pos2, tgt_lir->offset - dex2pc_offset);
      write_pos2 = EncodeSignedLeb128(write_pos2, static_cast<int32_t>(tgt_lir->dalvik_offset) -
                                      static_cast<int32_t>(dex2pc_dalvik_offset));
      dex2pc_offset = tgt_lir->offset;
      dex2pc_dalvik_offset = tgt_lir->dalvik_offset;
    }
  }
  DCHECK_EQ(static_cast<size_t>(write_pos - &encoded_mapping_table_[0]),
            hdr_data_size + pc2dex_data_size);
  DCHECK_EQ(static_cast<size_t>(write_pos2 - &encoded_mapping_table_[0]), data_size);

  if (kIsDebugBuild) {
    CHECK(VerifyCatchEntries());

    // Verify the encoded table holds the expected data.
    MappingTable table(&encoded_mapping_table_[0]);
    CHECK_EQ(table.TotalSize(), total_entries);
    CHECK_EQ(table.PcToDexSize(), pc2dex_entries);
    auto it = table.PcToDexBegin();
    auto it2 = table.DexToPcBegin();
    for (LIR* tgt_lir = first_lir_insn_; tgt_lir != NULL; tgt_lir = NEXT_LIR(tgt_lir)) {
      if (!tgt_lir->flags.is_nop && (tgt_lir->opcode == kPseudoSafepointPC)) {
        CHECK_EQ(tgt_lir->offset, it.NativePcOffset());
        CHECK_EQ(tgt_lir->dalvik_offset, it.DexPc());
        ++it;
      }
      if (!tgt_lir->flags.is_nop && (tgt_lir->opcode == kPseudoExportedPC)) {
        CHECK_EQ(tgt_lir->offset, it2.NativePcOffset());
        CHECK_EQ(tgt_lir->dalvik_offset, it2.DexPc());
        ++it2;
      }
    }
    CHECK(it == table.PcToDexEnd());
    CHECK(it2 == table.DexToPcEnd());
  }
}

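// Translate the verifier's dex-PC-indexed reference bitmaps into a native-PC-indexed
// GC map, using the pc2dex mapping entries recorded at safepoints.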
void Mir2Lir::CreateNativeGcMap() {
  DCHECK(!encoded_mapping_table_.empty());
  MappingTable mapping_table(&encoded_mapping_table_[0]);
  uint32_t max_native_offset = 0;
  for (auto it = mapping_table.PcToDexBegin(), end = mapping_table.PcToDexEnd(); it != end; ++it) {
    uint32_t native_offset = it.NativePcOffset();
    if (native_offset > max_native_offset) {
      max_native_offset = native_offset;
    }
  }
  MethodReference method_ref(cu_->dex_file, cu_->method_idx);
  const std::vector<uint8_t>& gc_map_raw =
      mir_graph_->GetCurrentDexCompilationUnit()->GetVerifiedMethod()->GetDexGcMap();
  verifier::DexPcToReferenceMap dex_gc_map(&(gc_map_raw)[0]);
  DCHECK_EQ(gc_map_raw.size(), dex_gc_map.RawSize());
  // Compute native offset to references size.
  GcMapBuilder native_gc_map_builder(&native_gc_map_,
                                     mapping_table.PcToDexSize(),
                                     max_native_offset, dex_gc_map.RegWidth());

  for (auto it = mapping_table.PcToDexBegin(), end = mapping_table.PcToDexEnd(); it != end; ++it) {
    uint32_t native_offset = it.NativePcOffset();
    uint32_t dex_pc = it.DexPc();
    const uint8_t* references = dex_gc_map.FindBitMap(dex_pc, false);
    CHECK(references != NULL) << "Missing ref for dex pc 0x" << std::hex << dex_pc <<
        ": " << PrettyMethod(cu_->method_idx, *cu_->dex_file);
    native_gc_map_builder.AddEntry(native_offset, references);
  }
}

/* Determine the offset of each literal field */
int Mir2Lir::AssignLiteralOffset(CodeOffset offset) {
  offset = AssignLiteralOffsetCommon(literal_list_, offset);
  unsigned int ptr_size = GetInstructionSetPointerSize(cu_->instruction_set);
  offset = AssignLiteralPointerOffsetCommon(code_literal_list_, offset, ptr_size);
  offset = AssignLiteralPointerOffsetCommon(method_literal_list_, offset, ptr_size);
  offset = AssignLiteralPointerOffsetCommon(class_literal_list_, offset, ptr_size);
  return offset;
}

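// Assign a data-section offset to each switch table; sparse tables store key/target
// pairs, packed tables store targets only.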
int Mir2Lir::AssignSwitchTablesOffset(CodeOffset offset) {
  GrowableArray<SwitchTable*>::Iterator iterator(&switch_tables_);
  while (true) {
    Mir2Lir::SwitchTable* tab_rec = iterator.Next();
    if (tab_rec == NULL) break;
    tab_rec->offset = offset;
    if (tab_rec->table[0] == Instruction::kSparseSwitchSignature) {
      offset += tab_rec->table[1] * (sizeof(int) * 2);
    } else {
      DCHECK_EQ(static_cast<int>(tab_rec->table[0]),
                static_cast<int>(Instruction::kPackedSwitchSignature));
      offset += tab_rec->table[1] * sizeof(int);
    }
  }
  return offset;
}

int Mir2Lir::AssignFillArrayDataOffset(CodeOffset offset) {
  GrowableArray<FillArrayData*>::Iterator iterator(&fill_array_data_);
  while (true) {
    Mir2Lir::FillArrayData *tab_rec = iterator.Next();
    if (tab_rec == NULL) break;
    tab_rec->offset = offset;
    offset += tab_rec->size;
    // word align
    offset = RoundUp(offset, 4);
  }
  return offset;
}

/*
 * Insert a kPseudoCaseLabel at the beginning of the Dalvik
 * offset vaddr if pretty-printing, otherwise use the standard block
 * label. The selected label will be used to fix up the case
 * branch table during the assembly phase. All resource flags
 * are set to prevent code motion. KeyVal is just there for debugging.
 */
LIR* Mir2Lir::InsertCaseLabel(DexOffset vaddr, int keyVal) {
  LIR* boundary_lir = &block_label_list_[mir_graph_->FindBlock(vaddr)->id];
  LIR* res = boundary_lir;
  if (cu_->verbose) {
    // Only pay the expense if we're pretty-printing.
    LIR* new_label = static_cast<LIR*>(arena_->Alloc(sizeof(LIR), kArenaAllocLIR));
    new_label->dalvik_offset = vaddr;
    new_label->opcode = kPseudoCaseLabel;
    new_label->operands[0] = keyVal;
    new_label->flags.fixup = kFixupLabel;
    DCHECK(!new_label->flags.use_def_invalid);
    new_label->u.m.def_mask = ENCODE_ALL;
    InsertLIRAfter(boundary_lir, new_label);
    res = new_label;
  }
  return res;
}

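// Bind each packed-switch target to a case label LIR; case keys are consecutive
// starting at the table's low_key.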
void Mir2Lir::MarkPackedCaseLabels(Mir2Lir::SwitchTable* tab_rec) {
  const uint16_t* table = tab_rec->table;
  DexOffset base_vaddr = tab_rec->vaddr;
  const int32_t *targets = reinterpret_cast<const int32_t*>(&table[4]);
  int entries = table[1];
  int low_key = s4FromSwitchData(&table[2]);
  for (int i = 0; i < entries; i++) {
    tab_rec->targets[i] = InsertCaseLabel(base_vaddr + targets[i], i + low_key);
  }
}

void Mir2Lir::MarkSparseCaseLabels(Mir2Lir::SwitchTable* tab_rec) {
  const uint16_t* table = tab_rec->table;
  DexOffset base_vaddr = tab_rec->vaddr;
  int entries = table[1];
  const int32_t* keys = reinterpret_cast<const int32_t*>(&table[2]);
  const int32_t* targets = &keys[entries];
  for (int i = 0; i < entries; i++) {
    tab_rec->targets[i] = InsertCaseLabel(base_vaddr + targets[i], keys[i]);
  }
}

void Mir2Lir::ProcessSwitchTables() {
  GrowableArray<SwitchTable*>::Iterator iterator(&switch_tables_);
  while (true) {
    Mir2Lir::SwitchTable *tab_rec = iterator.Next();
    if (tab_rec == NULL) break;
    if (tab_rec->table[0] == Instruction::kPackedSwitchSignature) {
      MarkPackedCaseLabels(tab_rec);
    } else if (tab_rec->table[0] == Instruction::kSparseSwitchSignature) {
      MarkSparseCaseLabels(tab_rec);
    } else {
      LOG(FATAL) << "Invalid switch table";
    }
  }
}

void Mir2Lir::DumpSparseSwitchTable(const uint16_t* table) {
  /*
   * Sparse switch data format:
   *  ushort ident = 0x0200   magic value
   *  ushort size             number of entries in the table; > 0
   *  int keys[size]          keys, sorted low-to-high; 32-bit aligned
   *  int targets[size]       branch targets, relative to switch opcode
   *
   * Total size is (2+size*4) 16-bit code units.
   */
  uint16_t ident = table[0];
  int entries = table[1];
  const int32_t* keys = reinterpret_cast<const int32_t*>(&table[2]);
  const int32_t* targets = &keys[entries];
  LOG(INFO) << "Sparse switch table - ident:0x" << std::hex << ident
            << ", entries: " << std::dec << entries;
  for (int i = 0; i < entries; i++) {
    LOG(INFO) << " Key[" << keys[i] << "] -> 0x" << std::hex << targets[i];
  }
}

void Mir2Lir::DumpPackedSwitchTable(const uint16_t* table) {
  /*
   * Packed switch data format:
   *  ushort ident = 0x0100   magic value
   *  ushort size             number of entries in the table
   *  int first_key           first (and lowest) switch case value
   *  int targets[size]       branch targets, relative to switch opcode
   *
   * Total size is (4+size*2) 16-bit code units.
   */
  uint16_t ident = table[0];
  const int32_t* targets = reinterpret_cast<const int32_t*>(&table[4]);
  int entries = table[1];
  int low_key = s4FromSwitchData(&table[2]);
  LOG(INFO) << "Packed switch table - ident:0x" << std::hex << ident
            << ", entries: " << std::dec << entries << ", low_key: " << low_key;
  for (int i = 0; i < entries; i++) {
    LOG(INFO) << " Key[" << (i + low_key) << "] -> 0x" << std::hex
              << targets[i];
  }
}

/* Set up special LIR to mark a Dalvik byte-code instruction start for pretty printing */
void Mir2Lir::MarkBoundary(DexOffset offset, const char* inst_str) {
  // NOTE: only used for debug listings.
  NewLIR1(kPseudoDalvikByteCodeBoundary, WrapPointer(ArenaStrdup(inst_str)));
}

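// Statically evaluate an IF_* comparison on constant operand values and return
// whether the branch would be taken.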
bool Mir2Lir::EvaluateBranch(Instruction::Code opcode, int32_t src1, int32_t src2) {
  bool is_taken;
  switch (opcode) {
    case Instruction::IF_EQ: is_taken = (src1 == src2); break;
    case Instruction::IF_NE: is_taken = (src1 != src2); break;
    case Instruction::IF_LT: is_taken = (src1 < src2); break;
    case Instruction::IF_GE: is_taken = (src1 >= src2); break;
    case Instruction::IF_GT: is_taken = (src1 > src2); break;
    case Instruction::IF_LE: is_taken = (src1 <= src2); break;
    case Instruction::IF_EQZ: is_taken = (src1 == 0); break;
    case Instruction::IF_NEZ: is_taken = (src1 != 0); break;
    case Instruction::IF_LTZ: is_taken = (src1 < 0); break;
    case Instruction::IF_GEZ: is_taken = (src1 >= 0); break;
    case Instruction::IF_GTZ: is_taken = (src1 > 0); break;
    case Instruction::IF_LEZ: is_taken = (src1 <= 0); break;
    default:
      LOG(FATAL) << "Unexpected opcode " << opcode;
      is_taken = false;
  }
  return is_taken;
}

// Convert relation of src1/src2 to src2/src1
ConditionCode Mir2Lir::FlipComparisonOrder(ConditionCode before) {
  ConditionCode res;
  switch (before) {
    case kCondEq: res = kCondEq; break;
    case kCondNe: res = kCondNe; break;
    case kCondLt: res = kCondGt; break;
    case kCondGt: res = kCondLt; break;
    case kCondLe: res = kCondGe; break;
    case kCondGe: res = kCondLe; break;
    default:
      res = static_cast<ConditionCode>(0);
      LOG(FATAL) << "Unexpected ccode " << before;
  }
  return res;
}

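// Return the logical negation of a condition code (e.g. kCondLt -> kCondGe).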
ConditionCode Mir2Lir::NegateComparison(ConditionCode before) {
  ConditionCode res;
  switch (before) {
    case kCondEq: res = kCondNe; break;
    case kCondNe: res = kCondEq; break;
    case kCondLt: res = kCondGe; break;
    case kCondGt: res = kCondLe; break;
    case kCondLe: res = kCondGt; break;
    case kCondGe: res = kCondLt; break;
    default:
      res = static_cast<ConditionCode>(0);
      LOG(FATAL) << "Unexpected ccode " << before;
  }
  return res;
}

// TODO: move to mir_to_lir.cc
Mir2Lir::Mir2Lir(CompilationUnit* cu, MIRGraph* mir_graph, ArenaAllocator* arena)
    : Backend(arena),
      literal_list_(NULL),
      method_literal_list_(NULL),
      class_literal_list_(NULL),
      code_literal_list_(NULL),
      first_fixup_(NULL),
      cu_(cu),
      mir_graph_(mir_graph),
      switch_tables_(arena, 4, kGrowableArraySwitchTables),
      fill_array_data_(arena, 4, kGrowableArrayFillArrayData),
      tempreg_info_(arena, 20, kGrowableArrayMisc),
      reginfo_map_(arena, RegStorage::kMaxRegs, kGrowableArrayMisc),
      pointer_storage_(arena, 128, kGrowableArrayMisc),
      data_offset_(0),
      total_size_(0),
      block_label_list_(NULL),
      promotion_map_(NULL),
      current_dalvik_offset_(0),
      estimated_native_code_size_(0),
      reg_pool_(NULL),
      live_sreg_(0),
      num_core_spills_(0),
      num_fp_spills_(0),
      frame_size_(0),
      core_spill_mask_(0),
      fp_spill_mask_(0),
      first_lir_insn_(NULL),
      last_lir_insn_(NULL),
      slow_paths_(arena, 32, kGrowableArraySlowPaths) {
  // Reserve pointer id 0 for NULL.
  size_t null_idx = WrapPointer(NULL);
  DCHECK_EQ(null_idx, 0U);
}

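// Drive the back end for one method: allocate registers, try the special-case
// (light) code generators, otherwise lower MIR to LIR, then assemble the LIR into
// machine code.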
void Mir2Lir::Materialize() {
  cu_->NewTimingSplit("RegisterAllocation");
  CompilerInitializeRegAlloc();  // Needs to happen after SSA naming

  /* Allocate Registers using simple local allocation scheme */
  SimpleRegAlloc();

  /* First try the custom light codegen for special cases. */
  DCHECK(cu_->compiler_driver->GetMethodInlinerMap() != nullptr);
  bool special_worked = cu_->compiler_driver->GetMethodInlinerMap()->GetMethodInliner(cu_->dex_file)
      ->GenSpecial(this, cu_->method_idx);

  /* Take normal path for converting MIR to LIR only if the special codegen did not succeed. */
  if (special_worked == false) {
    MethodMIR2LIR();
  }

  /* Method is not empty */
  if (first_lir_insn_) {
    // mark the targets of switch statement case labels
    ProcessSwitchTables();

    /* Convert LIR into machine code. */
    AssembleLIR();

    if (cu_->verbose) {
      CodegenDump();
    }
  }
}

CompiledMethod* Mir2Lir::GetCompiledMethod() {
  // Combine vmap tables - core regs, then fp regs - into vmap_table.
  Leb128EncodingVector vmap_encoder;
  if (frame_size_ > 0) {
    // Prefix the encoded data with its size.
    size_t size = core_vmap_table_.size() + 1 /* marker */ + fp_vmap_table_.size();
    vmap_encoder.Reserve(size + 1u);  // All values are likely to be one byte in ULEB128 (<128).
    vmap_encoder.PushBackUnsigned(size);
    // Core regs may have been inserted out of order - sort first.
    std::sort(core_vmap_table_.begin(), core_vmap_table_.end());
    for (size_t i = 0 ; i < core_vmap_table_.size(); ++i) {
      // Copy, stripping out the phys register sort key.
      vmap_encoder.PushBackUnsigned(
          ~(-1 << VREG_NUM_WIDTH) & (core_vmap_table_[i] + VmapTable::kEntryAdjustment));
    }
    // Push a marker to take place of lr.
    vmap_encoder.PushBackUnsigned(VmapTable::kAdjustedFpMarker);
    // fp regs already sorted.
    for (uint32_t i = 0; i < fp_vmap_table_.size(); i++) {
      vmap_encoder.PushBackUnsigned(fp_vmap_table_[i] + VmapTable::kEntryAdjustment);
    }
  } else {
    DCHECK_EQ(POPCOUNT(core_spill_mask_), 0);
    DCHECK_EQ(POPCOUNT(fp_spill_mask_), 0);
    DCHECK_EQ(core_vmap_table_.size(), 0u);
    DCHECK_EQ(fp_vmap_table_.size(), 0u);
    vmap_encoder.PushBackUnsigned(0u);  // Size is 0.
  }

  UniquePtr<std::vector<uint8_t> > cfi_info(ReturnCallFrameInformation());
  CompiledMethod* result =
      new CompiledMethod(cu_->compiler_driver, cu_->instruction_set, code_buffer_, frame_size_,
                         core_spill_mask_, fp_spill_mask_, encoded_mapping_table_,
                         vmap_encoder.GetData(), native_gc_map_, cfi_info.get());
  return result;
}

size_t Mir2Lir::GetMaxPossibleCompilerTemps() const {
  // Choose a reasonably small value in order to contain stack growth.
  // Backends that are smarter about spill region can return larger values.
  const size_t max_compiler_temps = 10;
  return max_compiler_temps;
}

size_t Mir2Lir::GetNumBytesForCompilerTempSpillRegion() {
  // By default assume that the Mir2Lir will need one slot for each temporary.
  // If the backend can better determine temps that have non-overlapping ranges and
  // temps that do not need to be spilled, it can actually provide a small region.
  return (mir_graph_->GetNumUsedCompilerTemps() * sizeof(uint32_t));
}

int Mir2Lir::ComputeFrameSize() {
  /* Figure out the frame size */
  uint32_t size = num_core_spills_ * GetBytesPerGprSpillLocation(cu_->instruction_set)
                  + num_fp_spills_ * GetBytesPerFprSpillLocation(cu_->instruction_set)
                  + sizeof(uint32_t)  // Filler.
                  + (cu_->num_regs + cu_->num_outs) * sizeof(uint32_t)
                  + GetNumBytesForCompilerTempSpillRegion();
  /* Align and set */
  return RoundUp(size, kStackAlignment);
}

/*
 * Append an LIR instruction to the LIR list maintained by a compilation
 * unit
 */
void Mir2Lir::AppendLIR(LIR* lir) {
  if (first_lir_insn_ == NULL) {
    DCHECK(last_lir_insn_ == NULL);
    last_lir_insn_ = first_lir_insn_ = lir;
    lir->prev = lir->next = NULL;
  } else {
    last_lir_insn_->next = lir;
    lir->prev = last_lir_insn_;
    lir->next = NULL;
    last_lir_insn_ = lir;
  }
}

/*
 * Insert an LIR instruction before the current instruction, which cannot be the
 * first instruction.
 *
 * prev_lir <-> new_lir <-> current_lir
 */
void Mir2Lir::InsertLIRBefore(LIR* current_lir, LIR* new_lir) {
  DCHECK(current_lir->prev != NULL);
  LIR *prev_lir = current_lir->prev;

  prev_lir->next = new_lir;
  new_lir->prev = prev_lir;
  new_lir->next = current_lir;
  current_lir->prev = new_lir;
}

/*
 * Insert an LIR instruction after the current instruction, which cannot be the
 * last instruction (its successor's prev link is rewritten below).
 *
 * current_lir -> new_lir -> old_next
 */
void Mir2Lir::InsertLIRAfter(LIR* current_lir, LIR* new_lir) {
  new_lir->prev = current_lir;
  new_lir->next = current_lir->next;
  current_lir->next = new_lir;
  new_lir->next->prev = new_lir;
}

bool Mir2Lir::IsPowerOfTwo(uint64_t x) {
  return (x & (x - 1)) == 0;
}

// Returns the index of the lowest set bit in 'x'.
int32_t Mir2Lir::LowestSetBit(uint64_t x) {
  int bit_posn = 0;
  while ((x & 0xf) == 0) {
    bit_posn += 4;
    x >>= 4;
  }
  while ((x & 1) == 0) {
    bit_posn++;
    x >>= 1;
  }
  return bit_posn;
}

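// Returns true if the wide source and destination virtual register pairs partially
// overlap (their base VRegs differ by exactly one), a layout that needs special
// handling by wide-value copies.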
bool Mir2Lir::BadOverlap(RegLocation rl_src, RegLocation rl_dest) {
  DCHECK(rl_src.wide);
  DCHECK(rl_dest.wide);
  return (abs(mir_graph_->SRegToVReg(rl_src.s_reg_low) - mir_graph_->SRegToVReg(rl_dest.s_reg_low)) == 1);
}

LIR *Mir2Lir::OpCmpMemImmBranch(ConditionCode cond, RegStorage temp_reg, RegStorage base_reg,
                                int offset, int check_value, LIR* target) {
  // Handle this for architectures that can't compare to memory.
  Load32Disp(base_reg, offset, temp_reg);
  LIR* branch = OpCmpImmBranch(cond, temp_reg, check_value, target);
  return branch;
}

void Mir2Lir::AddSlowPath(LIRSlowPath* slowpath) {
  slow_paths_.Insert(slowpath);
}

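// Emit a PC-relative load of the target method's code address into symbolic_reg
// from a literal pool entry; the entry is created on demand and patched later
// (see InstallLiteralPools).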
void Mir2Lir::LoadCodeAddress(const MethodReference& target_method, InvokeType type,
                              SpecialTargetRegister symbolic_reg) {
  int target_method_idx = target_method.dex_method_index;
  LIR* data_target = ScanLiteralPool(code_literal_list_, target_method_idx, 0);
  if (data_target == NULL) {
    data_target = AddWordData(&code_literal_list_, target_method_idx);
    data_target->operands[1] = WrapPointer(const_cast<DexFile*>(target_method.dex_file));
    data_target->operands[2] = type;
  }
  LIR* load_pc_rel = OpPcRelLoad(TargetReg(symbolic_reg), data_target);
  AppendLIR(load_pc_rel);
  DCHECK_NE(cu_->instruction_set, kMips) << reinterpret_cast<void*>(data_target);
}

void Mir2Lir::LoadMethodAddress(const MethodReference& target_method, InvokeType type,
                                SpecialTargetRegister symbolic_reg) {
  int target_method_idx = target_method.dex_method_index;
  LIR* data_target = ScanLiteralPool(method_literal_list_, target_method_idx, 0);
  if (data_target == NULL) {
    data_target = AddWordData(&method_literal_list_, target_method_idx);
    data_target->operands[1] = WrapPointer(const_cast<DexFile*>(target_method.dex_file));
    data_target->operands[2] = type;
  }
  LIR* load_pc_rel = OpPcRelLoad(TargetReg(symbolic_reg), data_target);
  AppendLIR(load_pc_rel);
  DCHECK_NE(cu_->instruction_set, kMips) << reinterpret_cast<void*>(data_target);
}

void Mir2Lir::LoadClassType(uint32_t type_idx, SpecialTargetRegister symbolic_reg) {
  // Use the literal pool and a PC-relative load from a data word.
  LIR* data_target = ScanLiteralPool(class_literal_list_, type_idx, 0);
  if (data_target == nullptr) {
    data_target = AddWordData(&class_literal_list_, type_idx);
  }
  LIR* load_pc_rel = OpPcRelLoad(TargetReg(symbolic_reg), data_target);
  AppendLIR(load_pc_rel);
}

std::vector<uint8_t>* Mir2Lir::ReturnCallFrameInformation() {
  // Default case is to do nothing.
  return nullptr;
}

RegLocation Mir2Lir::NarrowRegLoc(RegLocation loc) {
  loc.wide = false;
  if (loc.location == kLocPhysReg) {
    if (loc.reg.IsPair()) {
      loc.reg = loc.reg.GetLow();
    } else {
      // FIXME: temp workaround.
      // Issue here: how do we narrow to a 32-bit value in 64-bit container?
      // Probably the wrong thing to narrow the RegStorage container here. That
      // should be a target decision. At the RegLocation level, we're only
      // modifying the view of the Dalvik value - this is orthogonal to the storage
      // container size. Consider this a temp workaround.
      DCHECK(loc.reg.IsDouble());
      loc.reg = loc.reg.DoubleToLowSingle();
    }
  }
  return loc;
}

}  // namespace art