/*
 * Copyright (C) 2011 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "mir_to_lir-inl.h"

#include "base/bit_vector-inl.h"
#include "dex/mir_graph.h"
#include "driver/compiler_driver.h"
#include "driver/compiler_options.h"
#include "driver/dex_compilation_unit.h"
#include "dex_file-inl.h"
#include "gc_map.h"
#include "gc_map_builder.h"
#include "mapping_table.h"
#include "dex/quick/dex_file_method_inliner.h"
#include "dex/quick/dex_file_to_method_inliner_map.h"
#include "dex/verification_results.h"
#include "dex/verified_method.h"
#include "utils/dex_cache_arrays_layout-inl.h"
#include "verifier/dex_gc_map.h"
#include "verifier/method_verifier.h"
#include "vmap_table.h"

namespace art {

namespace {

/* Dump a mapping table */
template <typename It>
void DumpMappingTable(const char* table_name, const char* descriptor, const char* name,
                      const Signature& signature, uint32_t size, It first) {
  if (size != 0) {
    std::string line(StringPrintf("\n %s %s%s_%s_table[%u] = {", table_name,
                     descriptor, name, signature.ToString().c_str(), size));
    std::replace(line.begin(), line.end(), ';', '_');
    LOG(INFO) << line;
    for (uint32_t i = 0; i != size; ++i) {
      line = StringPrintf(" {0x%05x, 0x%04x},", first.NativePcOffset(), first.DexPc());
      ++first;
      LOG(INFO) << line;
    }
    LOG(INFO) << " };\n\n";
  }
}

}  // anonymous namespace

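// Ask the target backend whether the constant held by |rl_src| can be materialized cheaply
// (roughly, with a move/immediate rather than a literal load). Wide constants are normalized to
// the low word of the pair before dispatching on int/long/float/double.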
bool Mir2Lir::IsInexpensiveConstant(RegLocation rl_src) {
  bool res = false;
  if (rl_src.is_const) {
    if (rl_src.wide) {
      // For wide registers, check whether we're the high partner. In that case we need to switch
      // to the lower one for the correct value.
      if (rl_src.high_word) {
        rl_src.high_word = false;
        rl_src.s_reg_low--;
        rl_src.orig_sreg--;
      }
      if (rl_src.fp) {
        res = InexpensiveConstantDouble(mir_graph_->ConstantValueWide(rl_src));
      } else {
        res = InexpensiveConstantLong(mir_graph_->ConstantValueWide(rl_src));
      }
    } else {
      if (rl_src.fp) {
        res = InexpensiveConstantFloat(mir_graph_->ConstantValue(rl_src));
      } else {
        res = InexpensiveConstantInt(mir_graph_->ConstantValue(rl_src));
      }
    }
  }
  return res;
}

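// Emit a kPseudoSafepointPC marker right after |inst| and remember the (LIR, MIR) pair in
// safepoints_ so CreateNativeGcMap() can emit a GC map entry for it. |inst|'s def mask is widened
// to kEncodeAll, presumably so that later LIR passes will not move code across the safepoint.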
void Mir2Lir::MarkSafepointPC(LIR* inst) {
  DCHECK(!inst->flags.use_def_invalid);
  inst->u.m.def_mask = &kEncodeAll;
  LIR* safepoint_pc = NewLIR0(kPseudoSafepointPC);
  DCHECK(safepoint_pc->u.m.def_mask->Equals(kEncodeAll));
  DCHECK(current_mir_ != nullptr || (current_dalvik_offset_ == 0 && safepoints_.empty()));
  safepoints_.emplace_back(safepoint_pc, current_mir_);
}

void Mir2Lir::MarkSafepointPCAfter(LIR* after) {
  DCHECK(!after->flags.use_def_invalid);
  after->u.m.def_mask = &kEncodeAll;
  // As NewLIR0 uses Append, we need to create the LIR by hand.
  LIR* safepoint_pc = RawLIR(current_dalvik_offset_, kPseudoSafepointPC);
  if (after->next == nullptr) {
    DCHECK_EQ(after, last_lir_insn_);
    AppendLIR(safepoint_pc);
  } else {
    InsertLIRAfter(after, safepoint_pc);
  }
  DCHECK(safepoint_pc->u.m.def_mask->Equals(kEncodeAll));
  DCHECK(current_mir_ != nullptr || (current_dalvik_offset_ == 0 && safepoints_.empty()));
  safepoints_.emplace_back(safepoint_pc, current_mir_);
}

/* Remove a LIR from the list. */
void Mir2Lir::UnlinkLIR(LIR* lir) {
  if (UNLIKELY(lir == first_lir_insn_)) {
    first_lir_insn_ = lir->next;
    if (lir->next != nullptr) {
      lir->next->prev = nullptr;
    } else {
      DCHECK(lir->next == nullptr);
      DCHECK(lir == last_lir_insn_);
      last_lir_insn_ = nullptr;
    }
  } else if (lir == last_lir_insn_) {
    last_lir_insn_ = lir->prev;
    lir->prev->next = nullptr;
  } else if ((lir->prev != nullptr) && (lir->next != nullptr)) {
    lir->prev->next = lir->next;
    lir->next->prev = lir->prev;
  }
}

/* Convert an instruction to a NOP */
void Mir2Lir::NopLIR(LIR* lir) {
  lir->flags.is_nop = true;
  if (!cu_->verbose) {
    UnlinkLIR(lir);
  }
}

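// Rewrite the memory-kind bits of |lir|'s use mask (for loads) or def mask (for stores): the
// generic kEncodeMem bits are cleared and replaced with the single kind given by |mem_type|
// (literal, Dalvik register, heap reference, or must-not-alias).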
void Mir2Lir::SetMemRefType(LIR* lir, bool is_load, int mem_type) {
  DCHECK(GetTargetInstFlags(lir->opcode) & (IS_LOAD | IS_STORE));
  DCHECK(!lir->flags.use_def_invalid);
  // TODO: Avoid the extra Arena allocation!
  const ResourceMask** mask_ptr;
  ResourceMask mask;
  if (is_load) {
    mask_ptr = &lir->u.m.use_mask;
  } else {
    mask_ptr = &lir->u.m.def_mask;
  }
  mask = **mask_ptr;
  /* Clear out the memref flags */
  mask.ClearBits(kEncodeMem);
  /* ..and then add back the one we need */
  switch (mem_type) {
    case ResourceMask::kLiteral:
      DCHECK(is_load);
      mask.SetBit(ResourceMask::kLiteral);
      break;
    case ResourceMask::kDalvikReg:
      mask.SetBit(ResourceMask::kDalvikReg);
      break;
    case ResourceMask::kHeapRef:
      mask.SetBit(ResourceMask::kHeapRef);
      break;
    case ResourceMask::kMustNotAlias:
      /* Currently only loads can be marked as kMustNotAlias */
      DCHECK(!(GetTargetInstFlags(lir->opcode) & IS_STORE));
      mask.SetBit(ResourceMask::kMustNotAlias);
      break;
    default:
      LOG(FATAL) << "Oat: invalid memref kind - " << mem_type;
  }
  *mask_ptr = mask_cache_.GetMask(mask);
}

/*
 * Mark load/store instructions that access Dalvik registers through the stack.
 */
void Mir2Lir::AnnotateDalvikRegAccess(LIR* lir, int reg_id, bool is_load,
                                      bool is64bit) {
  DCHECK((is_load ? lir->u.m.use_mask : lir->u.m.def_mask)->Intersection(kEncodeMem).Equals(
      kEncodeDalvikReg));

  /*
   * Store the Dalvik register id in alias_info. Mark the MSB if it is a 64-bit
   * access.
   */
  lir->flags.alias_info = ENCODE_ALIAS_INFO(reg_id, is64bit);
}

/*
 * Debugging macros
 */
#define DUMP_RESOURCE_MASK(X)

/* Pretty-print a LIR instruction */
void Mir2Lir::DumpLIRInsn(LIR* lir, unsigned char* base_addr) {
  int offset = lir->offset;
  int dest = lir->operands[0];
  const bool dump_nop = (cu_->enable_debug & (1 << kDebugShowNops));

  /* Handle pseudo-ops individually, and all regular insns as a group */
  switch (lir->opcode) {
    case kPseudoPrologueBegin:
      LOG(INFO) << "-------- PrologueBegin";
      break;
    case kPseudoPrologueEnd:
      LOG(INFO) << "-------- PrologueEnd";
      break;
    case kPseudoEpilogueBegin:
      LOG(INFO) << "-------- EpilogueBegin";
      break;
    case kPseudoEpilogueEnd:
      LOG(INFO) << "-------- EpilogueEnd";
      break;
    case kPseudoBarrier:
      LOG(INFO) << "-------- BARRIER";
      break;
    case kPseudoEntryBlock:
      LOG(INFO) << "-------- entry offset: 0x" << std::hex << dest;
      break;
    case kPseudoDalvikByteCodeBoundary:
      if (lir->operands[0] == 0) {
        // NOTE: only used for debug listings.
        lir->operands[0] = WrapPointer(ArenaStrdup("No instruction string"));
      }
      LOG(INFO) << "-------- dalvik offset: 0x" << std::hex
                << lir->dalvik_offset << " @ "
                << UnwrapPointer<char>(lir->operands[0]);
      break;
    case kPseudoExitBlock:
      LOG(INFO) << "-------- exit offset: 0x" << std::hex << dest;
      break;
    case kPseudoPseudoAlign4:
      LOG(INFO) << reinterpret_cast<uintptr_t>(base_addr) + offset << " (0x" << std::hex
                << offset << "): .align4";
      break;
    case kPseudoEHBlockLabel:
      LOG(INFO) << "Exception_Handling:";
      break;
    case kPseudoTargetLabel:
    case kPseudoNormalBlockLabel:
      LOG(INFO) << "L" << reinterpret_cast<void*>(lir) << ":";
      break;
    case kPseudoThrowTarget:
      LOG(INFO) << "LT" << reinterpret_cast<void*>(lir) << ":";
      break;
    case kPseudoIntrinsicRetry:
      LOG(INFO) << "IR" << reinterpret_cast<void*>(lir) << ":";
      break;
    case kPseudoSuspendTarget:
      LOG(INFO) << "LS" << reinterpret_cast<void*>(lir) << ":";
      break;
    case kPseudoSafepointPC:
      LOG(INFO) << "LsafepointPC_0x" << std::hex << lir->offset << "_" << lir->dalvik_offset << ":";
      break;
    case kPseudoExportedPC:
      LOG(INFO) << "LexportedPC_0x" << std::hex << lir->offset << "_" << lir->dalvik_offset << ":";
      break;
    case kPseudoCaseLabel:
      LOG(INFO) << "LC" << reinterpret_cast<void*>(lir) << ": Case target 0x"
                << std::hex << lir->operands[0] << "|" << std::dec << lir->operands[0];
      break;
    default:
      if (lir->flags.is_nop && !dump_nop) {
        break;
      } else {
        std::string op_name(BuildInsnString(GetTargetInstName(lir->opcode),
                                            lir, base_addr));
        std::string op_operands(BuildInsnString(GetTargetInstFmt(lir->opcode),
                                                lir, base_addr));
        LOG(INFO) << StringPrintf("%5p|0x%02x: %-9s%s%s",
                                  base_addr + offset,
                                  lir->dalvik_offset,
                                  op_name.c_str(), op_operands.c_str(),
                                  lir->flags.is_nop ? "(nop)" : "");
      }
      break;
  }

  if (lir->u.m.use_mask && (!lir->flags.is_nop || dump_nop)) {
    DUMP_RESOURCE_MASK(DumpResourceMask(lir, *lir->u.m.use_mask, "use"));
  }
  if (lir->u.m.def_mask && (!lir->flags.is_nop || dump_nop)) {
    DUMP_RESOURCE_MASK(DumpResourceMask(lir, *lir->u.m.def_mask, "def"));
  }
}

void Mir2Lir::DumpPromotionMap() {
  uint32_t num_regs = mir_graph_->GetNumOfCodeAndTempVRs();
  for (uint32_t i = 0; i < num_regs; i++) {
    PromotionMap v_reg_map = promotion_map_[i];
    std::string buf;
    if (v_reg_map.fp_location == kLocPhysReg) {
      StringAppendF(&buf, " : s%d", RegStorage::RegNum(v_reg_map.fp_reg));
    }

    std::string buf3;
    if (i < mir_graph_->GetNumOfCodeVRs()) {
      StringAppendF(&buf3, "%02d", i);
    } else if (i == mir_graph_->GetNumOfCodeVRs()) {
      buf3 = "Method*";
    } else {
      uint32_t diff = i - mir_graph_->GetNumOfCodeVRs();
      StringAppendF(&buf3, "ct%d", diff);
    }

    LOG(INFO) << StringPrintf("V[%s] -> %s%d%s", buf3.c_str(),
                              v_reg_map.core_location == kLocPhysReg ?
                              "r" : "SP+", v_reg_map.core_location == kLocPhysReg ?
                              v_reg_map.core_reg : SRegOffset(i),
                              buf.c_str());
  }
}

void Mir2Lir::UpdateLIROffsets() {
  // Only used for code listings.
  size_t offset = 0;
  for (LIR* lir = first_lir_insn_; lir != nullptr; lir = lir->next) {
    lir->offset = offset;
    if (!lir->flags.is_nop && !IsPseudoLirOp(lir->opcode)) {
      offset += GetInsnSize(lir);
    } else if (lir->opcode == kPseudoPseudoAlign4) {
      offset += (offset & 0x2);
    }
  }
}

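// Card-marking write barrier: record that the object in |tgt_addr_reg| may now hold a reference.
// When the stored value is statically known to be non-null the card is marked unconditionally;
// otherwise a null store cannot create a reference the GC needs to trace, so we branch around
// the mark.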
void Mir2Lir::MarkGCCard(int opt_flags, RegStorage val_reg, RegStorage tgt_addr_reg) {
  DCHECK(val_reg.Valid());
  DCHECK_EQ(val_reg.Is64Bit(), cu_->target64);
  if ((opt_flags & MIR_STORE_NON_NULL_VALUE) != 0) {
    UnconditionallyMarkGCCard(tgt_addr_reg);
  } else {
    LIR* branch_over = OpCmpImmBranch(kCondEq, val_reg, 0, nullptr);
    UnconditionallyMarkGCCard(tgt_addr_reg);
    LIR* target = NewLIR0(kPseudoTargetLabel);
    branch_over->target = target;
  }
}

/* Dump instructions and constant pool contents */
void Mir2Lir::CodegenDump() {
  LOG(INFO) << "Dumping LIR insns for "
            << PrettyMethod(cu_->method_idx, *cu_->dex_file);
  LIR* lir_insn;
  int insns_size = mir_graph_->GetNumDalvikInsns();

  LOG(INFO) << "Regs (excluding ins) : " << mir_graph_->GetNumOfLocalCodeVRs();
  LOG(INFO) << "Ins : " << mir_graph_->GetNumOfInVRs();
  LOG(INFO) << "Outs : " << mir_graph_->GetNumOfOutVRs();
  LOG(INFO) << "CoreSpills : " << num_core_spills_;
  LOG(INFO) << "FPSpills : " << num_fp_spills_;
  LOG(INFO) << "CompilerTemps : " << mir_graph_->GetNumUsedCompilerTemps();
  LOG(INFO) << "Frame size : " << frame_size_;
  LOG(INFO) << "code size is " << total_size_ <<
      " bytes, Dalvik size is " << insns_size * 2;
  LOG(INFO) << "expansion factor: "
            << static_cast<float>(total_size_) / static_cast<float>(insns_size * 2);
  DumpPromotionMap();
  UpdateLIROffsets();
  for (lir_insn = first_lir_insn_; lir_insn != nullptr; lir_insn = lir_insn->next) {
    DumpLIRInsn(lir_insn, 0);
  }
  for (lir_insn = literal_list_; lir_insn != nullptr; lir_insn = lir_insn->next) {
    LOG(INFO) << StringPrintf("%x (%04x): .word (%#x)", lir_insn->offset, lir_insn->offset,
                              lir_insn->operands[0]);
  }

  const DexFile::MethodId& method_id =
      cu_->dex_file->GetMethodId(cu_->method_idx);
  const Signature signature = cu_->dex_file->GetMethodSignature(method_id);
  const char* name = cu_->dex_file->GetMethodName(method_id);
  const char* descriptor(cu_->dex_file->GetMethodDeclaringClassDescriptor(method_id));

  // Dump mapping tables
  if (!encoded_mapping_table_.empty()) {
    MappingTable table(&encoded_mapping_table_[0]);
    DumpMappingTable("PC2Dex_MappingTable", descriptor, name, signature,
                     table.PcToDexSize(), table.PcToDexBegin());
    DumpMappingTable("Dex2PC_MappingTable", descriptor, name, signature,
                     table.DexToPcSize(), table.DexToPcBegin());
  }
}

/*
 * Search the existing constants in the literal pool for an exact or close match
 * within specified delta (greater or equal to 0).
 */
LIR* Mir2Lir::ScanLiteralPool(LIR* data_target, int value, unsigned int delta) {
  while (data_target) {
    if ((static_cast<unsigned>(value - data_target->operands[0])) <= delta)
      return data_target;
    data_target = data_target->next;
  }
  return nullptr;
}

/* Search the existing constants in the literal pool for an exact wide match */
LIR* Mir2Lir::ScanLiteralPoolWide(LIR* data_target, int val_lo, int val_hi) {
  bool lo_match = false;
  LIR* lo_target = nullptr;
  while (data_target) {
    if (lo_match && (data_target->operands[0] == val_hi)) {
      // Record high word in case we need to expand this later.
      lo_target->operands[1] = val_hi;
      return lo_target;
    }
    lo_match = false;
    if (data_target->operands[0] == val_lo) {
      lo_match = true;
      lo_target = data_target;
    }
    data_target = data_target->next;
  }
  return nullptr;
}

/* Search the existing constants in the literal pool for an exact method match */
LIR* Mir2Lir::ScanLiteralPoolMethod(LIR* data_target, const MethodReference& method) {
  while (data_target) {
    if (static_cast<uint32_t>(data_target->operands[0]) == method.dex_method_index &&
        UnwrapPointer<DexFile>(data_target->operands[1]) == method.dex_file) {
      return data_target;
    }
    data_target = data_target->next;
  }
  return nullptr;
}

/* Search the existing constants in the literal pool for an exact class match */
LIR* Mir2Lir::ScanLiteralPoolClass(LIR* data_target, const DexFile& dex_file, uint32_t type_idx) {
  while (data_target) {
    if (static_cast<uint32_t>(data_target->operands[0]) == type_idx &&
        UnwrapPointer<DexFile>(data_target->operands[1]) == &dex_file) {
      return data_target;
    }
    data_target = data_target->next;
  }
  return nullptr;
}

/*
 * The following are building blocks to insert constants into the pool or
 * instruction streams.
 */

/* Add a 32-bit constant to the constant pool */
LIR* Mir2Lir::AddWordData(LIR* *constant_list_p, int value) {
  /* Add the constant to the literal pool */
  if (constant_list_p) {
    LIR* new_value = static_cast<LIR*>(arena_->Alloc(sizeof(LIR), kArenaAllocData));
    new_value->operands[0] = value;
    new_value->next = *constant_list_p;
    *constant_list_p = new_value;
    estimated_native_code_size_ += sizeof(value);
    return new_value;
  }
  return nullptr;
}

/* Add a 64-bit constant to the constant pool or mixed with code */
LIR* Mir2Lir::AddWideData(LIR* *constant_list_p, int val_lo, int val_hi) {
  AddWordData(constant_list_p, val_hi);
  return AddWordData(constant_list_p, val_lo);
}

/**
 * @brief Push a compressed reference which needs patching at link/patchoat-time.
 * @details This needs to be kept consistent with the code which actually does the patching in
 * oat_writer.cc and in the patchoat tool.
 */
static void PushUnpatchedReference(CodeBuffer* buf) {
  // Note that we can safely initialize the patches to zero. The code deduplication mechanism takes
  // the patches into account when determining whether two pieces of code are functionally
  // equivalent.
  Push32(buf, UINT32_C(0));
}

static void AlignBuffer(CodeBuffer* buf, size_t offset) {
  DCHECK_LE(buf->size(), offset);
  buf->insert(buf->end(), offset - buf->size(), 0u);
}

/* Write the literal pool to the output stream */
void Mir2Lir::InstallLiteralPools() {
  AlignBuffer(&code_buffer_, data_offset_);
  LIR* data_lir = literal_list_;
  while (data_lir != nullptr) {
    Push32(&code_buffer_, data_lir->operands[0]);
    data_lir = NEXT_LIR(data_lir);
  }
  // TODO: patches_.reserve() as needed.
  // Push code and method literals, record offsets for the compiler to patch.
  data_lir = code_literal_list_;
  while (data_lir != nullptr) {
    uint32_t target_method_idx = data_lir->operands[0];
    const DexFile* target_dex_file = UnwrapPointer<DexFile>(data_lir->operands[1]);
    patches_.push_back(LinkerPatch::CodePatch(code_buffer_.size(),
                                              target_dex_file, target_method_idx));
    PushUnpatchedReference(&code_buffer_);
    data_lir = NEXT_LIR(data_lir);
  }
  data_lir = method_literal_list_;
  while (data_lir != nullptr) {
    uint32_t target_method_idx = data_lir->operands[0];
    const DexFile* target_dex_file = UnwrapPointer<DexFile>(data_lir->operands[1]);
    patches_.push_back(LinkerPatch::MethodPatch(code_buffer_.size(),
                                                target_dex_file, target_method_idx));
    PushUnpatchedReference(&code_buffer_);
    data_lir = NEXT_LIR(data_lir);
  }
  // Push class literals.
  data_lir = class_literal_list_;
  while (data_lir != nullptr) {
    uint32_t target_type_idx = data_lir->operands[0];
    const DexFile* class_dex_file = UnwrapPointer<DexFile>(data_lir->operands[1]);
    patches_.push_back(LinkerPatch::TypePatch(code_buffer_.size(),
                                              class_dex_file, target_type_idx));
    PushUnpatchedReference(&code_buffer_);
    data_lir = NEXT_LIR(data_lir);
  }
}

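// The switch data referenced by tab_rec->table below is the Dalvik switch payload (16-bit code
// units):
//   packed-switch: { ident 0x0100, ushort size, int first_key, int targets[size] }
//   sparse-switch: { ident 0x0200, ushort size, int keys[size], int targets[size] }
// InstallSwitchTables() recomputes each case's branch displacement against the emitted code and
// writes {key, disp} pairs (sparse) or disp-only entries (packed) into code_buffer_.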
/* Write the switch tables to the output stream */
void Mir2Lir::InstallSwitchTables() {
  for (Mir2Lir::SwitchTable* tab_rec : switch_tables_) {
    AlignBuffer(&code_buffer_, tab_rec->offset);
    /*
     * For Arm, our reference point is the address of the bx
     * instruction that does the launch, so we have to subtract
     * the auto pc-advance. For other targets the reference point
     * is a label, so we can use the offset as-is.
     */
    int bx_offset = INVALID_OFFSET;
    switch (cu_->instruction_set) {
      case kThumb2:
        DCHECK(tab_rec->anchor->flags.fixup != kFixupNone);
        bx_offset = tab_rec->anchor->offset + 4;
        break;
      case kX86:
        bx_offset = 0;
        break;
      case kX86_64:
        // RIP relative to switch table.
        bx_offset = tab_rec->offset;
        break;
      case kArm64:
      case kMips:
      case kMips64:
        bx_offset = tab_rec->anchor->offset;
        break;
      default: LOG(FATAL) << "Unexpected instruction set: " << cu_->instruction_set;
    }
    if (cu_->verbose) {
      LOG(INFO) << "Switch table for offset 0x" << std::hex << bx_offset;
    }
    if (tab_rec->table[0] == Instruction::kSparseSwitchSignature) {
      DCHECK(tab_rec->switch_mir != nullptr);
      BasicBlock* bb = mir_graph_->GetBasicBlock(tab_rec->switch_mir->bb);
      DCHECK(bb != nullptr);
      int elems = 0;
      for (SuccessorBlockInfo* successor_block_info : bb->successor_blocks) {
        int key = successor_block_info->key;
        int target = successor_block_info->block;
        LIR* boundary_lir = InsertCaseLabel(target, key);
        DCHECK(boundary_lir != nullptr);
        int disp = boundary_lir->offset - bx_offset;
        Push32(&code_buffer_, key);
        Push32(&code_buffer_, disp);
        if (cu_->verbose) {
          LOG(INFO) << " Case[" << elems << "] key: 0x"
                    << std::hex << key << ", disp: 0x"
                    << std::hex << disp;
        }
        elems++;
      }
      DCHECK_EQ(elems, tab_rec->table[1]);
    } else {
      DCHECK_EQ(static_cast<int>(tab_rec->table[0]),
                static_cast<int>(Instruction::kPackedSwitchSignature));
      DCHECK(tab_rec->switch_mir != nullptr);
      BasicBlock* bb = mir_graph_->GetBasicBlock(tab_rec->switch_mir->bb);
      DCHECK(bb != nullptr);
      int elems = 0;
      int low_key = s4FromSwitchData(&tab_rec->table[2]);
      for (SuccessorBlockInfo* successor_block_info : bb->successor_blocks) {
        int key = successor_block_info->key;
        DCHECK_EQ(elems + low_key, key);
        int target = successor_block_info->block;
        LIR* boundary_lir = InsertCaseLabel(target, key);
        DCHECK(boundary_lir != nullptr);
        int disp = boundary_lir->offset - bx_offset;
        Push32(&code_buffer_, disp);
        if (cu_->verbose) {
          LOG(INFO) << " Case[" << elems << "] disp: 0x"
                    << std::hex << disp;
        }
        elems++;
      }
      DCHECK_EQ(elems, tab_rec->table[1]);
    }
  }
}

/* Write the fill array data to the output stream */
void Mir2Lir::InstallFillArrayData() {
  for (Mir2Lir::FillArrayData* tab_rec : fill_array_data_) {
    AlignBuffer(&code_buffer_, tab_rec->offset);
    for (int i = 0; i < (tab_rec->size + 1) / 2; i++) {
      code_buffer_.push_back(tab_rec->table[i] & 0xFF);
      code_buffer_.push_back((tab_rec->table[i] >> 8) & 0xFF);
    }
  }
}

static int AssignLiteralOffsetCommon(LIR* lir, CodeOffset offset) {
  for (; lir != nullptr; lir = lir->next) {
    lir->offset = offset;
    offset += 4;
  }
  return offset;
}

static int AssignLiteralPointerOffsetCommon(LIR* lir, CodeOffset offset,
                                            unsigned int element_size) {
  // Align to natural pointer size.
  offset = RoundUp(offset, element_size);
  for (; lir != nullptr; lir = lir->next) {
    lir->offset = offset;
    offset += element_size;
  }
  return offset;
}

// Make sure we have a code address for every declared catch entry
bool Mir2Lir::VerifyCatchEntries() {
  MappingTable table(&encoded_mapping_table_[0]);
  std::vector<uint32_t> dex_pcs;
  dex_pcs.reserve(table.DexToPcSize());
  for (auto it = table.DexToPcBegin(), end = table.DexToPcEnd(); it != end; ++it) {
    dex_pcs.push_back(it.DexPc());
  }
  // Sort dex_pcs, so that we can quickly check it against the ordered mir_graph_->catches_.
  std::sort(dex_pcs.begin(), dex_pcs.end());

  bool success = true;
  auto it = dex_pcs.begin(), end = dex_pcs.end();
  for (uint32_t dex_pc : mir_graph_->catches_) {
    while (it != end && *it < dex_pc) {
      LOG(INFO) << "Unexpected catch entry @ dex pc 0x" << std::hex << *it;
      ++it;
      success = false;
    }
    if (it == end || *it > dex_pc) {
      LOG(INFO) << "Missing native PC for catch entry @ 0x" << std::hex << dex_pc;
      success = false;
    } else {
      ++it;
    }
  }
  if (!success) {
    LOG(INFO) << "Bad dex2pcMapping table in " << PrettyMethod(cu_->method_idx, *cu_->dex_file);
    LOG(INFO) << "Entries @ decode: " << mir_graph_->catches_.size() << ", Entries in table: "
              << table.DexToPcSize();
  }
  return success;
}

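// The encoded mapping table produced below has the layout:
//   ULEB128 total_entries, ULEB128 pc2dex_entries,
//   pc2dex_entries entries of { ULEB128 native-pc delta, SLEB128 dex-pc delta },   (safepoints)
//   remaining entries of      { ULEB128 native-pc delta, SLEB128 dex-pc delta }.   (exported PCs)
// Deltas are relative to the previous entry of the same sub-table, starting from 0. For example,
// two safepoints at native offsets 0x10 and 0x20 with dex pcs 0x3 and 0x1 encode as the bytes
// 0x10 0x03 0x10 0x7e (0x7e being -2 as SLEB128).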
void Mir2Lir::CreateMappingTables() {
  bool generate_src_map = cu_->compiler_driver->GetCompilerOptions().GetIncludeDebugSymbols();

  uint32_t pc2dex_data_size = 0u;
  uint32_t pc2dex_entries = 0u;
  uint32_t pc2dex_offset = 0u;
  uint32_t pc2dex_dalvik_offset = 0u;
  uint32_t pc2dex_src_entries = 0u;
  uint32_t dex2pc_data_size = 0u;
  uint32_t dex2pc_entries = 0u;
  uint32_t dex2pc_offset = 0u;
  uint32_t dex2pc_dalvik_offset = 0u;
  for (LIR* tgt_lir = first_lir_insn_; tgt_lir != nullptr; tgt_lir = NEXT_LIR(tgt_lir)) {
    pc2dex_src_entries++;
    if (!tgt_lir->flags.is_nop && (tgt_lir->opcode == kPseudoSafepointPC)) {
      pc2dex_entries += 1;
      DCHECK(pc2dex_offset <= tgt_lir->offset);
      pc2dex_data_size += UnsignedLeb128Size(tgt_lir->offset - pc2dex_offset);
      pc2dex_data_size += SignedLeb128Size(static_cast<int32_t>(tgt_lir->dalvik_offset) -
                                           static_cast<int32_t>(pc2dex_dalvik_offset));
      pc2dex_offset = tgt_lir->offset;
      pc2dex_dalvik_offset = tgt_lir->dalvik_offset;
    }
    if (!tgt_lir->flags.is_nop && (tgt_lir->opcode == kPseudoExportedPC)) {
      dex2pc_entries += 1;
      DCHECK(dex2pc_offset <= tgt_lir->offset);
      dex2pc_data_size += UnsignedLeb128Size(tgt_lir->offset - dex2pc_offset);
      dex2pc_data_size += SignedLeb128Size(static_cast<int32_t>(tgt_lir->dalvik_offset) -
                                           static_cast<int32_t>(dex2pc_dalvik_offset));
      dex2pc_offset = tgt_lir->offset;
      dex2pc_dalvik_offset = tgt_lir->dalvik_offset;
    }
  }

  if (generate_src_map) {
    src_mapping_table_.reserve(pc2dex_src_entries);
  }

  uint32_t total_entries = pc2dex_entries + dex2pc_entries;
  uint32_t hdr_data_size = UnsignedLeb128Size(total_entries) + UnsignedLeb128Size(pc2dex_entries);
  uint32_t data_size = hdr_data_size + pc2dex_data_size + dex2pc_data_size;
  encoded_mapping_table_.resize(data_size);
  uint8_t* write_pos = &encoded_mapping_table_[0];
  write_pos = EncodeUnsignedLeb128(write_pos, total_entries);
  write_pos = EncodeUnsignedLeb128(write_pos, pc2dex_entries);
  DCHECK_EQ(static_cast<size_t>(write_pos - &encoded_mapping_table_[0]), hdr_data_size);
  uint8_t* write_pos2 = write_pos + pc2dex_data_size;

  bool is_in_prologue_or_epilogue = false;
  pc2dex_offset = 0u;
  pc2dex_dalvik_offset = 0u;
  dex2pc_offset = 0u;
  dex2pc_dalvik_offset = 0u;
  for (LIR* tgt_lir = first_lir_insn_; tgt_lir != nullptr; tgt_lir = NEXT_LIR(tgt_lir)) {
    if (generate_src_map && !tgt_lir->flags.is_nop && tgt_lir->opcode >= 0) {
      if (!is_in_prologue_or_epilogue) {
        src_mapping_table_.push_back(SrcMapElem({tgt_lir->offset,
                                                 static_cast<int32_t>(tgt_lir->dalvik_offset)}));
      }
    }
    if (!tgt_lir->flags.is_nop && (tgt_lir->opcode == kPseudoSafepointPC)) {
      DCHECK(pc2dex_offset <= tgt_lir->offset);
      write_pos = EncodeUnsignedLeb128(write_pos, tgt_lir->offset - pc2dex_offset);
      write_pos = EncodeSignedLeb128(write_pos, static_cast<int32_t>(tgt_lir->dalvik_offset) -
                                     static_cast<int32_t>(pc2dex_dalvik_offset));
      pc2dex_offset = tgt_lir->offset;
      pc2dex_dalvik_offset = tgt_lir->dalvik_offset;
    }
    if (!tgt_lir->flags.is_nop && (tgt_lir->opcode == kPseudoExportedPC)) {
      DCHECK(dex2pc_offset <= tgt_lir->offset);
      write_pos2 = EncodeUnsignedLeb128(write_pos2, tgt_lir->offset - dex2pc_offset);
      write_pos2 = EncodeSignedLeb128(write_pos2, static_cast<int32_t>(tgt_lir->dalvik_offset) -
                                      static_cast<int32_t>(dex2pc_dalvik_offset));
      dex2pc_offset = tgt_lir->offset;
      dex2pc_dalvik_offset = tgt_lir->dalvik_offset;
    }
    if (tgt_lir->opcode == kPseudoPrologueBegin || tgt_lir->opcode == kPseudoEpilogueBegin) {
      is_in_prologue_or_epilogue = true;
    }
    if (tgt_lir->opcode == kPseudoPrologueEnd || tgt_lir->opcode == kPseudoEpilogueEnd) {
      is_in_prologue_or_epilogue = false;
    }
  }
  DCHECK_EQ(static_cast<size_t>(write_pos - &encoded_mapping_table_[0]),
            hdr_data_size + pc2dex_data_size);
  DCHECK_EQ(static_cast<size_t>(write_pos2 - &encoded_mapping_table_[0]), data_size);

  if (kIsDebugBuild) {
    CHECK(VerifyCatchEntries());

    // Verify the encoded table holds the expected data.
    MappingTable table(&encoded_mapping_table_[0]);
    CHECK_EQ(table.TotalSize(), total_entries);
    CHECK_EQ(table.PcToDexSize(), pc2dex_entries);
    auto it = table.PcToDexBegin();
    auto it2 = table.DexToPcBegin();
    for (LIR* tgt_lir = first_lir_insn_; tgt_lir != nullptr; tgt_lir = NEXT_LIR(tgt_lir)) {
      if (!tgt_lir->flags.is_nop && (tgt_lir->opcode == kPseudoSafepointPC)) {
        CHECK_EQ(tgt_lir->offset, it.NativePcOffset());
        CHECK_EQ(tgt_lir->dalvik_offset, it.DexPc());
        ++it;
      }
      if (!tgt_lir->flags.is_nop && (tgt_lir->opcode == kPseudoExportedPC)) {
        CHECK_EQ(tgt_lir->offset, it2.NativePcOffset());
        CHECK_EQ(tgt_lir->dalvik_offset, it2.DexPc());
        ++it2;
      }
    }
    CHECK(it == table.PcToDexEnd());
    CHECK(it2 == table.DexToPcEnd());
  }
}

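// Build the native GC map from the recorded safepoints: one entry per safepoint, keyed by native
// pc offset, whose payload is a bitmap of the Dalvik vregs that hold references at that point.
// reg_width is the number of bytes needed to cover vregs 0..max_ref_vreg, i.e.
// (max_ref_vreg + 8) / 8 == ceil((max_ref_vreg + 1) / 8).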
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 787 | void Mir2Lir::CreateNativeGcMap() { |
Vladimir Marko | 767c752 | 2015-03-20 12:47:30 +0000 | [diff] [blame] | 788 | if (UNLIKELY((cu_->disable_opt & (1u << kPromoteRegs)) != 0u)) { |
| 789 | // If we're not promoting to physical registers, it's safe to use the verifier's notion of |
| 790 | // references. (We disable register promotion when type inference finds a type conflict and |
| 791 | // in that the case we defer to the verifier to avoid using the compiler's conflicting info.) |
| 792 | CreateNativeGcMapWithoutRegisterPromotion(); |
| 793 | return; |
| 794 | } |
| 795 | |
| 796 | ArenaBitVector* references = new (arena_) ArenaBitVector(arena_, mir_graph_->GetNumSSARegs(), |
| 797 | false); |
| 798 | |
| 799 | // Calculate max native offset and max reference vreg. |
| 800 | MIR* prev_mir = nullptr; |
| 801 | int max_ref_vreg = -1; |
| 802 | CodeOffset max_native_offset = 0u; |
| 803 | for (const auto& entry : safepoints_) { |
| 804 | uint32_t native_offset = entry.first->offset; |
| 805 | max_native_offset = std::max(max_native_offset, native_offset); |
| 806 | MIR* mir = entry.second; |
| 807 | UpdateReferenceVRegs(mir, prev_mir, references); |
| 808 | max_ref_vreg = std::max(max_ref_vreg, references->GetHighestBitSet()); |
| 809 | prev_mir = mir; |
| 810 | } |
| 811 | |
Vladimir Marko | 6e07183 | 2015-03-25 11:13:39 +0000 | [diff] [blame] | 812 | #if defined(BYTE_ORDER) && (BYTE_ORDER == LITTLE_ENDIAN) |
| 813 | static constexpr bool kLittleEndian = true; |
| 814 | #else |
| 815 | static constexpr bool kLittleEndian = false; |
| 816 | #endif |
| 817 | |
Vladimir Marko | 767c752 | 2015-03-20 12:47:30 +0000 | [diff] [blame] | 818 | // Build the GC map. |
| 819 | uint32_t reg_width = static_cast<uint32_t>((max_ref_vreg + 8) / 8); |
| 820 | GcMapBuilder native_gc_map_builder(&native_gc_map_, |
| 821 | safepoints_.size(), |
| 822 | max_native_offset, reg_width); |
Vladimir Marko | 6e07183 | 2015-03-25 11:13:39 +0000 | [diff] [blame] | 823 | if (kLittleEndian) { |
| 824 | for (const auto& entry : safepoints_) { |
| 825 | uint32_t native_offset = entry.first->offset; |
| 826 | MIR* mir = entry.second; |
| 827 | UpdateReferenceVRegs(mir, prev_mir, references); |
| 828 | // For little-endian, the bytes comprising the bit vector's raw storage are what we need. |
| 829 | native_gc_map_builder.AddEntry(native_offset, |
| 830 | reinterpret_cast<const uint8_t*>(references->GetRawStorage())); |
| 831 | prev_mir = mir; |
Vladimir Marko | 767c752 | 2015-03-20 12:47:30 +0000 | [diff] [blame] | 832 | } |
Vladimir Marko | 6e07183 | 2015-03-25 11:13:39 +0000 | [diff] [blame] | 833 | } else { |
| 834 | ArenaVector<uint8_t> references_buffer(arena_->Adapter()); |
| 835 | references_buffer.resize(reg_width); |
| 836 | for (const auto& entry : safepoints_) { |
| 837 | uint32_t native_offset = entry.first->offset; |
| 838 | MIR* mir = entry.second; |
| 839 | UpdateReferenceVRegs(mir, prev_mir, references); |
| 840 | // Big-endian or unknown endianness, manually translate the bit vector data. |
| 841 | const auto* raw_storage = references->GetRawStorage(); |
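      // Extract byte i of the bitmap from the word-sized raw storage, least-significant byte first.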
| 842 | for (size_t i = 0; i != reg_width; ++i) { |
| 843 | references_buffer[i] = static_cast<uint8_t>( |
| 844 | raw_storage[i / sizeof(raw_storage[0])] >> (8u * (i % sizeof(raw_storage[0])))); |
| 845 | } |
| 846 | native_gc_map_builder.AddEntry(native_offset, &references_buffer[0]); |
| 847 | prev_mir = mir; |
| 848 | } |
Vladimir Marko | 767c752 | 2015-03-20 12:47:30 +0000 | [diff] [blame] | 849 | } |
| 850 | } |
| 851 | |
| 852 | void Mir2Lir::CreateNativeGcMapWithoutRegisterPromotion() { |
Vladimir Marko | 06606b9 | 2013-12-02 15:31:08 +0000 | [diff] [blame] | 853 | DCHECK(!encoded_mapping_table_.empty()); |
| 854 | MappingTable mapping_table(&encoded_mapping_table_[0]); |
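  // Find the largest native PC offset in the mapping table; it bounds the offsets stored in the GC map.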
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 855 | uint32_t max_native_offset = 0; |
Vladimir Marko | 06606b9 | 2013-12-02 15:31:08 +0000 | [diff] [blame] | 856 | for (auto it = mapping_table.PcToDexBegin(), end = mapping_table.PcToDexEnd(); it != end; ++it) { |
| 857 | uint32_t native_offset = it.NativePcOffset(); |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 858 | if (native_offset > max_native_offset) { |
| 859 | max_native_offset = native_offset; |
| 860 | } |
| 861 | } |
| 862 | MethodReference method_ref(cu_->dex_file, cu_->method_idx); |
Vladimir Marko | 2730db0 | 2014-01-27 11:15:17 +0000 | [diff] [blame] | 863 | const std::vector<uint8_t>& gc_map_raw = |
| 864 | mir_graph_->GetCurrentDexCompilationUnit()->GetVerifiedMethod()->GetDexGcMap(); |
| 865 | verifier::DexPcToReferenceMap dex_gc_map(&(gc_map_raw)[0]); |
| 866 | DCHECK_EQ(gc_map_raw.size(), dex_gc_map.RawSize()); |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 867 |   // Build the native offset to references map.
Nicolas Geoffray | 92cf83e | 2014-03-18 17:59:20 +0000 | [diff] [blame] | 868 | GcMapBuilder native_gc_map_builder(&native_gc_map_, |
| 869 | mapping_table.PcToDexSize(), |
| 870 | max_native_offset, dex_gc_map.RegWidth()); |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 871 | |
Vladimir Marko | 06606b9 | 2013-12-02 15:31:08 +0000 | [diff] [blame] | 872 | for (auto it = mapping_table.PcToDexBegin(), end = mapping_table.PcToDexEnd(); it != end; ++it) { |
| 873 | uint32_t native_offset = it.NativePcOffset(); |
| 874 | uint32_t dex_pc = it.DexPc(); |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 875 | const uint8_t* references = dex_gc_map.FindBitMap(dex_pc, false); |
Ian Rogers | 6a3c1fc | 2014-10-31 00:33:20 -0700 | [diff] [blame] | 876 | CHECK(references != nullptr) << "Missing ref for dex pc 0x" << std::hex << dex_pc << |
Dave Allison | f943914 | 2014-03-27 15:10:22 -0700 | [diff] [blame] | 877 | ": " << PrettyMethod(cu_->method_idx, *cu_->dex_file); |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 878 | native_gc_map_builder.AddEntry(native_offset, references); |
| 879 | } |
Mathieu Chartier | ab972ef | 2014-12-03 17:38:22 -0800 | [diff] [blame] | 880 | |
| 881 | // Maybe not necessary, but this could help prevent errors where we access the verified method |
| 882 | // after it has been deleted. |
| 883 | mir_graph_->GetCurrentDexCompilationUnit()->ClearVerifiedMethod(); |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 884 | } |
| 885 | |
| 886 | /* Determine the offset of each literal field */ |
buzbee | 0d82948 | 2013-10-11 15:24:55 -0700 | [diff] [blame] | 887 | int Mir2Lir::AssignLiteralOffset(CodeOffset offset) { |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 888 | offset = AssignLiteralOffsetCommon(literal_list_, offset); |
Matteo Franchin | 27cc093 | 2014-09-08 18:29:24 +0100 | [diff] [blame] | 889 | constexpr unsigned int ptr_size = sizeof(uint32_t); |
Andreas Gampe | 785d2f2 | 2014-11-03 22:57:30 -0800 | [diff] [blame] | 890 | static_assert(ptr_size >= sizeof(mirror::HeapReference<mirror::Object>), |
| 891 | "Pointer size cannot hold a heap reference"); |
Ian Rogers | ff093b3 | 2014-04-30 19:04:27 -0700 | [diff] [blame] | 892 | offset = AssignLiteralPointerOffsetCommon(code_literal_list_, offset, ptr_size); |
| 893 | offset = AssignLiteralPointerOffsetCommon(method_literal_list_, offset, ptr_size); |
| 894 | offset = AssignLiteralPointerOffsetCommon(class_literal_list_, offset, ptr_size); |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 895 | return offset; |
| 896 | } |
| 897 | |
buzbee | 0d82948 | 2013-10-11 15:24:55 -0700 | [diff] [blame] | 898 | int Mir2Lir::AssignSwitchTablesOffset(CodeOffset offset) { |
Vladimir Marko | e39c54e | 2014-09-22 14:50:02 +0100 | [diff] [blame] | 899 | for (Mir2Lir::SwitchTable* tab_rec : switch_tables_) { |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 900 | tab_rec->offset = offset; |
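    // A sparse switch payload stores one 4-byte key and one 4-byte target per entry;
    // a packed switch payload stores only the 4-byte targets.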
| 901 | if (tab_rec->table[0] == Instruction::kSparseSwitchSignature) { |
| 902 | offset += tab_rec->table[1] * (sizeof(int) * 2); |
| 903 | } else { |
| 904 | DCHECK_EQ(static_cast<int>(tab_rec->table[0]), |
| 905 | static_cast<int>(Instruction::kPackedSwitchSignature)); |
| 906 | offset += tab_rec->table[1] * sizeof(int); |
| 907 | } |
| 908 | } |
| 909 | return offset; |
| 910 | } |
| 911 | |
buzbee | 0d82948 | 2013-10-11 15:24:55 -0700 | [diff] [blame] | 912 | int Mir2Lir::AssignFillArrayDataOffset(CodeOffset offset) { |
Vladimir Marko | e39c54e | 2014-09-22 14:50:02 +0100 | [diff] [blame] | 913 | for (Mir2Lir::FillArrayData* tab_rec : fill_array_data_) { |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 914 | tab_rec->offset = offset; |
| 915 | offset += tab_rec->size; |
| 916 | // word align |
Andreas Gampe | 6601882 | 2014-05-05 20:47:19 -0700 | [diff] [blame] | 917 | offset = RoundUp(offset, 4); |
Vladimir Marko | e39c54e | 2014-09-22 14:50:02 +0100 | [diff] [blame] | 918 | } |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 919 | return offset; |
| 920 | } |
| 921 | |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 922 | /* |
| 923 | * Insert a kPseudoCaseLabel at the beginning of the Dalvik |
buzbee | b48819d | 2013-09-14 16:15:25 -0700 | [diff] [blame] | 924 |  * offset vaddr if pretty-printing, otherwise use the standard block
| 925 | * label. The selected label will be used to fix up the case |
buzbee | 252254b | 2013-09-08 16:20:53 -0700 | [diff] [blame] | 926 | * branch table during the assembly phase. All resource flags |
| 927 | * are set to prevent code motion. KeyVal is just there for debugging. |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 928 | */ |
Chao-ying Fu | 72f53af | 2014-11-11 16:48:40 -0800 | [diff] [blame] | 929 | LIR* Mir2Lir::InsertCaseLabel(uint32_t bbid, int keyVal) { |
| 930 | LIR* boundary_lir = &block_label_list_[bbid]; |
buzbee | b48819d | 2013-09-14 16:15:25 -0700 | [diff] [blame] | 931 | LIR* res = boundary_lir; |
| 932 | if (cu_->verbose) { |
| 933 | // Only pay the expense if we're pretty-printing. |
Vladimir Marko | 83cc7ae | 2014-02-12 18:02:05 +0000 | [diff] [blame] | 934 | LIR* new_label = static_cast<LIR*>(arena_->Alloc(sizeof(LIR), kArenaAllocLIR)); |
Chao-ying Fu | 72f53af | 2014-11-11 16:48:40 -0800 | [diff] [blame] | 935 | BasicBlock* bb = mir_graph_->GetBasicBlock(bbid); |
| 936 | DCHECK(bb != nullptr); |
| 937 | new_label->dalvik_offset = bb->start_offset; |
buzbee | b48819d | 2013-09-14 16:15:25 -0700 | [diff] [blame] | 938 | new_label->opcode = kPseudoCaseLabel; |
| 939 | new_label->operands[0] = keyVal; |
| 940 | new_label->flags.fixup = kFixupLabel; |
| 941 | DCHECK(!new_label->flags.use_def_invalid); |
Vladimir Marko | 8dea81c | 2014-06-06 14:50:36 +0100 | [diff] [blame] | 942 | new_label->u.m.def_mask = &kEncodeAll; |
buzbee | b48819d | 2013-09-14 16:15:25 -0700 | [diff] [blame] | 943 | InsertLIRAfter(boundary_lir, new_label); |
buzbee | b48819d | 2013-09-14 16:15:25 -0700 | [diff] [blame] | 944 | } |
| 945 | return res; |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 946 | } |
| 947 | |
Brian Carlstrom | 2ce745c | 2013-07-17 17:44:30 -0700 | [diff] [blame] | 948 | void Mir2Lir::DumpSparseSwitchTable(const uint16_t* table) { |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 949 | /* |
| 950 | * Sparse switch data format: |
| 951 | * ushort ident = 0x0200 magic value |
| 952 | * ushort size number of entries in the table; > 0 |
| 953 | * int keys[size] keys, sorted low-to-high; 32-bit aligned |
| 954 | * int targets[size] branch targets, relative to switch opcode |
| 955 | * |
| 956 | * Total size is (2+size*4) 16-bit code units. |
| 957 | */ |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 958 | uint16_t ident = table[0]; |
| 959 | int entries = table[1]; |
buzbee | 0d82948 | 2013-10-11 15:24:55 -0700 | [diff] [blame] | 960 | const int32_t* keys = reinterpret_cast<const int32_t*>(&table[2]); |
| 961 | const int32_t* targets = &keys[entries]; |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 962 | LOG(INFO) << "Sparse switch table - ident:0x" << std::hex << ident |
| 963 | << ", entries: " << std::dec << entries; |
| 964 | for (int i = 0; i < entries; i++) { |
| 965 | LOG(INFO) << " Key[" << keys[i] << "] -> 0x" << std::hex << targets[i]; |
| 966 | } |
| 967 | } |
| 968 | |
Brian Carlstrom | 2ce745c | 2013-07-17 17:44:30 -0700 | [diff] [blame] | 969 | void Mir2Lir::DumpPackedSwitchTable(const uint16_t* table) { |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 970 | /* |
| 971 | * Packed switch data format: |
| 972 | * ushort ident = 0x0100 magic value |
| 973 | * ushort size number of entries in the table |
| 974 | * int first_key first (and lowest) switch case value |
| 975 | * int targets[size] branch targets, relative to switch opcode |
| 976 | * |
| 977 | * Total size is (4+size*2) 16-bit code units. |
| 978 | */ |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 979 | uint16_t ident = table[0]; |
buzbee | 0d82948 | 2013-10-11 15:24:55 -0700 | [diff] [blame] | 980 | const int32_t* targets = reinterpret_cast<const int32_t*>(&table[4]); |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 981 | int entries = table[1]; |
| 982 | int low_key = s4FromSwitchData(&table[2]); |
| 983 | LOG(INFO) << "Packed switch table - ident:0x" << std::hex << ident |
| 984 | << ", entries: " << std::dec << entries << ", low_key: " << low_key; |
| 985 | for (int i = 0; i < entries; i++) { |
| 986 | LOG(INFO) << " Key[" << (i + low_key) << "] -> 0x" << std::hex |
| 987 | << targets[i]; |
| 988 | } |
| 989 | } |
| 990 | |
buzbee | 252254b | 2013-09-08 16:20:53 -0700 | [diff] [blame] | 991 | /* Set up special LIR to mark a Dalvik byte-code instruction start for pretty printing */ |
buzbee | 0d82948 | 2013-10-11 15:24:55 -0700 | [diff] [blame] | 992 | void Mir2Lir::MarkBoundary(DexOffset offset, const char* inst_str) { |
Ian Rogers | 6a3c1fc | 2014-10-31 00:33:20 -0700 | [diff] [blame] | 993 | UNUSED(offset); |
buzbee | 0d82948 | 2013-10-11 15:24:55 -0700 | [diff] [blame] | 994 | // NOTE: only used for debug listings. |
| 995 | NewLIR1(kPseudoDalvikByteCodeBoundary, WrapPointer(ArenaStrdup(inst_str))); |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 996 | } |
| 997 | |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 998 | // Convert relation of src1/src2 to src2/src1 |
| 999 | ConditionCode Mir2Lir::FlipComparisonOrder(ConditionCode before) { |
| 1000 | ConditionCode res; |
| 1001 | switch (before) { |
| 1002 | case kCondEq: res = kCondEq; break; |
| 1003 | case kCondNe: res = kCondNe; break; |
| 1004 | case kCondLt: res = kCondGt; break; |
| 1005 | case kCondGt: res = kCondLt; break; |
| 1006 | case kCondLe: res = kCondGe; break; |
| 1007 | case kCondGe: res = kCondLe; break; |
| 1008 | default: |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 1009 | LOG(FATAL) << "Unexpected ccode " << before; |
Ian Rogers | 6a3c1fc | 2014-10-31 00:33:20 -0700 | [diff] [blame] | 1010 | UNREACHABLE(); |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 1011 | } |
| 1012 | return res; |
| 1013 | } |
| 1014 | |
Vladimir Marko | a1a7074 | 2014-03-03 10:28:05 +0000 | [diff] [blame] | 1015 | ConditionCode Mir2Lir::NegateComparison(ConditionCode before) { |
| 1016 | ConditionCode res; |
| 1017 | switch (before) { |
| 1018 | case kCondEq: res = kCondNe; break; |
| 1019 | case kCondNe: res = kCondEq; break; |
| 1020 | case kCondLt: res = kCondGe; break; |
| 1021 | case kCondGt: res = kCondLe; break; |
| 1022 | case kCondLe: res = kCondGt; break; |
| 1023 | case kCondGe: res = kCondLt; break; |
| 1024 | default: |
Vladimir Marko | a1a7074 | 2014-03-03 10:28:05 +0000 | [diff] [blame] | 1025 | LOG(FATAL) << "Unexpected ccode " << before; |
Ian Rogers | 6a3c1fc | 2014-10-31 00:33:20 -0700 | [diff] [blame] | 1026 | UNREACHABLE(); |
Vladimir Marko | a1a7074 | 2014-03-03 10:28:05 +0000 | [diff] [blame] | 1027 | } |
| 1028 | return res; |
| 1029 | } |
| 1030 | |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 1031 | // TODO: move to mir_to_lir.cc |
| 1032 | Mir2Lir::Mir2Lir(CompilationUnit* cu, MIRGraph* mir_graph, ArenaAllocator* arena) |
Andreas Gampe | 9c46208 | 2015-01-27 14:31:40 -0800 | [diff] [blame] | 1033 | : literal_list_(nullptr), |
Ian Rogers | 6a3c1fc | 2014-10-31 00:33:20 -0700 | [diff] [blame] | 1034 | method_literal_list_(nullptr), |
| 1035 | class_literal_list_(nullptr), |
| 1036 | code_literal_list_(nullptr), |
| 1037 | first_fixup_(nullptr), |
Andreas Gampe | 9c46208 | 2015-01-27 14:31:40 -0800 | [diff] [blame] | 1038 | arena_(arena), |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 1039 | cu_(cu), |
| 1040 | mir_graph_(mir_graph), |
Vladimir Marko | e39c54e | 2014-09-22 14:50:02 +0100 | [diff] [blame] | 1041 | switch_tables_(arena->Adapter(kArenaAllocSwitchTable)), |
| 1042 | fill_array_data_(arena->Adapter(kArenaAllocFillArrayData)), |
| 1043 | tempreg_info_(arena->Adapter()), |
| 1044 | reginfo_map_(arena->Adapter()), |
| 1045 | pointer_storage_(arena->Adapter()), |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 1046 | data_offset_(0), |
| 1047 | total_size_(0), |
Ian Rogers | 6a3c1fc | 2014-10-31 00:33:20 -0700 | [diff] [blame] | 1048 | block_label_list_(nullptr), |
| 1049 | promotion_map_(nullptr), |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 1050 | current_dalvik_offset_(0), |
Vladimir Marko | 767c752 | 2015-03-20 12:47:30 +0000 | [diff] [blame] | 1051 | current_mir_(nullptr), |
buzbee | b48819d | 2013-09-14 16:15:25 -0700 | [diff] [blame] | 1052 | estimated_native_code_size_(0), |
Vladimir Marko | e39c54e | 2014-09-22 14:50:02 +0100 | [diff] [blame] | 1053 | reg_pool_(nullptr), |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 1054 | live_sreg_(0), |
Vladimir Marko | 80b96d1 | 2015-02-19 15:50:28 +0000 | [diff] [blame] | 1055 | code_buffer_(mir_graph->GetArena()->Adapter()), |
| 1056 | encoded_mapping_table_(mir_graph->GetArena()->Adapter()), |
Vladimir Marko | 8081d2b | 2014-07-31 15:33:43 +0100 | [diff] [blame] | 1057 | core_vmap_table_(mir_graph->GetArena()->Adapter()), |
| 1058 | fp_vmap_table_(mir_graph->GetArena()->Adapter()), |
Vladimir Marko | 80b96d1 | 2015-02-19 15:50:28 +0000 | [diff] [blame] | 1059 | native_gc_map_(mir_graph->GetArena()->Adapter()), |
Vladimir Marko | f4da675 | 2014-08-01 19:04:18 +0100 | [diff] [blame] | 1060 | patches_(mir_graph->GetArena()->Adapter()), |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 1061 | num_core_spills_(0), |
| 1062 | num_fp_spills_(0), |
| 1063 | frame_size_(0), |
| 1064 | core_spill_mask_(0), |
| 1065 | fp_spill_mask_(0), |
Ian Rogers | 6a3c1fc | 2014-10-31 00:33:20 -0700 | [diff] [blame] | 1066 | first_lir_insn_(nullptr), |
| 1067 | last_lir_insn_(nullptr), |
Vladimir Marko | e39c54e | 2014-09-22 14:50:02 +0100 | [diff] [blame] | 1068 | slow_paths_(arena->Adapter(kArenaAllocSlowPaths)), |
Vladimir Marko | 8dea81c | 2014-06-06 14:50:36 +0100 | [diff] [blame] | 1069 | mem_ref_type_(ResourceMask::kHeapRef), |
Serguei Katkov | 717a3e4 | 2014-11-13 17:19:42 +0600 | [diff] [blame] | 1070 | mask_cache_(arena), |
Vladimir Marko | 767c752 | 2015-03-20 12:47:30 +0000 | [diff] [blame] | 1071 | safepoints_(arena->Adapter()), |
Vladimir Marko | 20f8559 | 2015-03-19 10:07:02 +0000 | [diff] [blame] | 1072 | dex_cache_arrays_layout_(cu->compiler_driver->GetDexCacheArraysLayout(cu->dex_file)), |
Serguei Katkov | 717a3e4 | 2014-11-13 17:19:42 +0600 | [diff] [blame] | 1073 | in_to_reg_storage_mapping_(arena) { |
Vladimir Marko | e39c54e | 2014-09-22 14:50:02 +0100 | [diff] [blame] | 1074 | switch_tables_.reserve(4); |
| 1075 | fill_array_data_.reserve(4); |
| 1076 | tempreg_info_.reserve(20); |
| 1077 | reginfo_map_.reserve(RegStorage::kMaxRegs); |
| 1078 | pointer_storage_.reserve(128); |
| 1079 | slow_paths_.reserve(32); |
Ian Rogers | 6a3c1fc | 2014-10-31 00:33:20 -0700 | [diff] [blame] | 1080 | // Reserve pointer id 0 for nullptr. |
Vladimir Marko | f6737f7 | 2015-03-23 17:05:14 +0000 | [diff] [blame] | 1081 | size_t null_idx = WrapPointer<void>(nullptr); |
buzbee | 0d82948 | 2013-10-11 15:24:55 -0700 | [diff] [blame] | 1082 | DCHECK_EQ(null_idx, 0U); |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 1083 | } |
| 1084 | |
| 1085 | void Mir2Lir::Materialize() { |
buzbee | a61f495 | 2013-08-23 14:27:06 -0700 | [diff] [blame] | 1086 | cu_->NewTimingSplit("RegisterAllocation"); |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 1087 | CompilerInitializeRegAlloc(); // Needs to happen after SSA naming |
| 1088 | |
| 1089 | /* Allocate Registers using simple local allocation scheme */ |
| 1090 | SimpleRegAlloc(); |
| 1091 | |
Razvan A Lupusoru | 3bc0174 | 2014-02-06 13:18:43 -0800 | [diff] [blame] | 1092 | /* First try the custom light codegen for special cases. */ |
Vladimir Marko | 5816ed4 | 2013-11-27 17:04:20 +0000 | [diff] [blame] | 1093 | DCHECK(cu_->compiler_driver->GetMethodInlinerMap() != nullptr); |
Razvan A Lupusoru | 3bc0174 | 2014-02-06 13:18:43 -0800 | [diff] [blame] | 1094 | bool special_worked = cu_->compiler_driver->GetMethodInlinerMap()->GetMethodInliner(cu_->dex_file) |
Vladimir Marko | 5816ed4 | 2013-11-27 17:04:20 +0000 | [diff] [blame] | 1095 | ->GenSpecial(this, cu_->method_idx); |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 1096 | |
Razvan A Lupusoru | 3bc0174 | 2014-02-06 13:18:43 -0800 | [diff] [blame] | 1097 | /* Take normal path for converting MIR to LIR only if the special codegen did not succeed. */ |
| 1098 | if (special_worked == false) { |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 1099 | MethodMIR2LIR(); |
| 1100 | } |
| 1101 | |
| 1102 | /* Method is not empty */ |
| 1103 | if (first_lir_insn_) { |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 1104 | /* Convert LIR into machine code. */ |
| 1105 | AssembleLIR(); |
| 1106 | |
buzbee | b01bf15 | 2014-05-13 15:59:07 -0700 | [diff] [blame] | 1107 | if ((cu_->enable_debug & (1 << kDebugCodegenDump)) != 0) { |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 1108 | CodegenDump(); |
| 1109 | } |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 1110 | } |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 1111 | } |
| 1112 | |
| 1113 | CompiledMethod* Mir2Lir::GetCompiledMethod() { |
Vladimir Marko | 2e589aa | 2014-02-25 17:53:53 +0000 | [diff] [blame] | 1114 | // Combine vmap tables - core regs, then fp regs - into vmap_table. |
| 1115 | Leb128EncodingVector vmap_encoder; |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 1116 | if (frame_size_ > 0) { |
Vladimir Marko | 2e589aa | 2014-02-25 17:53:53 +0000 | [diff] [blame] | 1117 | // Prefix the encoded data with its size. |
| 1118 | size_t size = core_vmap_table_.size() + 1 /* marker */ + fp_vmap_table_.size(); |
| 1119 | vmap_encoder.Reserve(size + 1u); // All values are likely to be one byte in ULEB128 (<128). |
| 1120 | vmap_encoder.PushBackUnsigned(size); |
| 1121 | // Core regs may have been inserted out of order - sort first. |
| 1122 | std::sort(core_vmap_table_.begin(), core_vmap_table_.end()); |
| 1123 | for (size_t i = 0 ; i < core_vmap_table_.size(); ++i) { |
| 1124 | // Copy, stripping out the phys register sort key. |
| 1125 | vmap_encoder.PushBackUnsigned( |
| 1126 | ~(-1 << VREG_NUM_WIDTH) & (core_vmap_table_[i] + VmapTable::kEntryAdjustment)); |
| 1127 | } |
| 1128 | // Push a marker to take place of lr. |
| 1129 | vmap_encoder.PushBackUnsigned(VmapTable::kAdjustedFpMarker); |
Serguei Katkov | c380191 | 2014-07-08 17:21:53 +0700 | [diff] [blame] | 1130 | if (cu_->instruction_set == kThumb2) { |
| 1131 | // fp regs already sorted. |
| 1132 | for (uint32_t i = 0; i < fp_vmap_table_.size(); i++) { |
| 1133 | vmap_encoder.PushBackUnsigned(fp_vmap_table_[i] + VmapTable::kEntryAdjustment); |
| 1134 | } |
| 1135 | } else { |
| 1136 | // For other platforms regs may have been inserted out of order - sort first. |
| 1137 | std::sort(fp_vmap_table_.begin(), fp_vmap_table_.end()); |
| 1138 | for (size_t i = 0 ; i < fp_vmap_table_.size(); ++i) { |
| 1139 | // Copy, stripping out the phys register sort key. |
| 1140 | vmap_encoder.PushBackUnsigned( |
| 1141 | ~(-1 << VREG_NUM_WIDTH) & (fp_vmap_table_[i] + VmapTable::kEntryAdjustment)); |
| 1142 | } |
Vladimir Marko | 2e589aa | 2014-02-25 17:53:53 +0000 | [diff] [blame] | 1143 | } |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 1144 | } else { |
Vladimir Marko | 8194963 | 2014-05-02 11:53:22 +0100 | [diff] [blame] | 1145 | DCHECK_EQ(POPCOUNT(core_spill_mask_), 0); |
| 1146 | DCHECK_EQ(POPCOUNT(fp_spill_mask_), 0); |
Vladimir Marko | 2e589aa | 2014-02-25 17:53:53 +0000 | [diff] [blame] | 1147 | DCHECK_EQ(core_vmap_table_.size(), 0u); |
| 1148 | DCHECK_EQ(fp_vmap_table_.size(), 0u); |
| 1149 | vmap_encoder.PushBackUnsigned(0u); // Size is 0. |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 1150 | } |
Mark Mendell | ae9fd93 | 2014-02-10 16:14:35 -0800 | [diff] [blame] | 1151 | |
Vladimir Marko | f4da675 | 2014-08-01 19:04:18 +0100 | [diff] [blame] | 1152 | // Sort patches by literal offset for better deduplication. |
| 1153 | std::sort(patches_.begin(), patches_.end(), [](const LinkerPatch& lhs, const LinkerPatch& rhs) { |
| 1154 | return lhs.LiteralOffset() < rhs.LiteralOffset(); |
| 1155 | }); |
| 1156 | |
Andreas Gampe | d37f919 | 2015-03-04 14:00:56 -0800 | [diff] [blame] | 1157 | std::unique_ptr<std::vector<uint8_t>> cfi_info( |
| 1158 | cu_->compiler_driver->GetCompilerOptions().GetGenerateGDBInformation() ? |
| 1159 | ReturnFrameDescriptionEntry() : |
| 1160 | nullptr); |
Andreas Gampe | e21dc3d | 2014-12-08 16:59:43 -0800 | [diff] [blame] | 1161 | ArrayRef<const uint8_t> cfi_ref; |
| 1162 | if (cfi_info.get() != nullptr) { |
| 1163 | cfi_ref = ArrayRef<const uint8_t>(*cfi_info); |
| 1164 | } |
| 1165 | return CompiledMethod::SwapAllocCompiledMethod( |
| 1166 | cu_->compiler_driver, cu_->instruction_set, |
| 1167 | ArrayRef<const uint8_t>(code_buffer_), |
| 1168 | frame_size_, core_spill_mask_, fp_spill_mask_, |
| 1169 | &src_mapping_table_, |
| 1170 | ArrayRef<const uint8_t>(encoded_mapping_table_), |
| 1171 | ArrayRef<const uint8_t>(vmap_encoder.GetData()), |
| 1172 | ArrayRef<const uint8_t>(native_gc_map_), |
| 1173 | cfi_ref, |
Vladimir Marko | b207e14 | 2015-04-02 21:25:21 +0100 | [diff] [blame] | 1174 | ArrayRef<const LinkerPatch>(patches_)); |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 1175 | } |
| 1176 | |
Razvan A Lupusoru | da7a69b | 2014-01-08 15:09:50 -0800 | [diff] [blame] | 1177 | size_t Mir2Lir::GetMaxPossibleCompilerTemps() const { |
| 1178 |   // Choose a reasonably small value in order to contain stack growth.
| 1179 | // Backends that are smarter about spill region can return larger values. |
| 1180 | const size_t max_compiler_temps = 10; |
| 1181 | return max_compiler_temps; |
| 1182 | } |
| 1183 | |
| 1184 | size_t Mir2Lir::GetNumBytesForCompilerTempSpillRegion() { |
| 1185 | // By default assume that the Mir2Lir will need one slot for each temporary. |
| 1186 | // If the backend can better determine temps that have non-overlapping ranges and |
| 1187 |   // temps that do not need to be spilled, it can actually provide a smaller region.
Razvan A Lupusoru | 8d0d03e | 2014-06-06 17:04:52 -0700 | [diff] [blame] | 1188 | mir_graph_->CommitCompilerTemps(); |
| 1189 | return mir_graph_->GetNumBytesForSpecialTemps() + mir_graph_->GetMaximumBytesForNonSpecialTemps(); |
Razvan A Lupusoru | da7a69b | 2014-01-08 15:09:50 -0800 | [diff] [blame] | 1190 | } |
| 1191 | |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 1192 | int Mir2Lir::ComputeFrameSize() { |
| 1193 | /* Figure out the frame size */ |
Dmitry Petrochenko | f29a424 | 2014-05-05 20:28:47 +0700 | [diff] [blame] | 1194 | uint32_t size = num_core_spills_ * GetBytesPerGprSpillLocation(cu_->instruction_set) |
| 1195 | + num_fp_spills_ * GetBytesPerFprSpillLocation(cu_->instruction_set) |
| 1196 | + sizeof(uint32_t) // Filler. |
Razvan A Lupusoru | 8d0d03e | 2014-06-06 17:04:52 -0700 | [diff] [blame] | 1197 | + mir_graph_->GetNumOfLocalCodeVRs() * sizeof(uint32_t) |
| 1198 | + mir_graph_->GetNumOfOutVRs() * sizeof(uint32_t) |
Dmitry Petrochenko | f29a424 | 2014-05-05 20:28:47 +0700 | [diff] [blame] | 1199 | + GetNumBytesForCompilerTempSpillRegion(); |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 1200 | /* Align and set */ |
Andreas Gampe | 6601882 | 2014-05-05 20:47:19 -0700 | [diff] [blame] | 1201 | return RoundUp(size, kStackAlignment); |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 1202 | } |
| 1203 | |
| 1204 | /* |
| 1205 | * Append an LIR instruction to the LIR list maintained by a compilation |
| 1206 | * unit |
| 1207 | */ |
Brian Carlstrom | 2ce745c | 2013-07-17 17:44:30 -0700 | [diff] [blame] | 1208 | void Mir2Lir::AppendLIR(LIR* lir) { |
Ian Rogers | 6a3c1fc | 2014-10-31 00:33:20 -0700 | [diff] [blame] | 1209 | if (first_lir_insn_ == nullptr) { |
| 1210 | DCHECK(last_lir_insn_ == nullptr); |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 1211 | last_lir_insn_ = first_lir_insn_ = lir; |
Ian Rogers | 6a3c1fc | 2014-10-31 00:33:20 -0700 | [diff] [blame] | 1212 | lir->prev = lir->next = nullptr; |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 1213 | } else { |
| 1214 | last_lir_insn_->next = lir; |
| 1215 | lir->prev = last_lir_insn_; |
Ian Rogers | 6a3c1fc | 2014-10-31 00:33:20 -0700 | [diff] [blame] | 1216 | lir->next = nullptr; |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 1217 | last_lir_insn_ = lir; |
| 1218 | } |
| 1219 | } |
| 1220 | |
| 1221 | /* |
| 1222 | * Insert an LIR instruction before the current instruction, which cannot be the |
| 1223 | * first instruction. |
| 1224 | * |
| 1225 | * prev_lir <-> new_lir <-> current_lir |
| 1226 | */ |
Brian Carlstrom | 2ce745c | 2013-07-17 17:44:30 -0700 | [diff] [blame] | 1227 | void Mir2Lir::InsertLIRBefore(LIR* current_lir, LIR* new_lir) { |
Ian Rogers | 6a3c1fc | 2014-10-31 00:33:20 -0700 | [diff] [blame] | 1228 | DCHECK(current_lir->prev != nullptr); |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 1229 | LIR *prev_lir = current_lir->prev; |
| 1230 | |
| 1231 | prev_lir->next = new_lir; |
| 1232 | new_lir->prev = prev_lir; |
| 1233 | new_lir->next = current_lir; |
| 1234 | current_lir->prev = new_lir; |
| 1235 | } |
| 1236 | |
| 1237 | /* |
| 1238 | * Insert an LIR instruction after the current instruction, which cannot be the |
Andreas Gampe | 3c12c51 | 2014-06-24 18:46:29 +0000 | [diff] [blame] | 1239 | * last instruction. |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 1240 | * |
| 1241 | * current_lir -> new_lir -> old_next |
| 1242 | */ |
Brian Carlstrom | 2ce745c | 2013-07-17 17:44:30 -0700 | [diff] [blame] | 1243 | void Mir2Lir::InsertLIRAfter(LIR* current_lir, LIR* new_lir) { |
Brian Carlstrom | 7940e44 | 2013-07-12 13:46:57 -0700 | [diff] [blame] | 1244 | new_lir->prev = current_lir; |
| 1245 | new_lir->next = current_lir->next; |
| 1246 | current_lir->next = new_lir; |
| 1247 | new_lir->next->prev = new_lir; |
| 1248 | } |
| 1249 | |
Alexei Zavjalov | d8c3e36 | 2014-10-08 15:51:59 +0700 | [diff] [blame] | 1250 | bool Mir2Lir::PartiallyIntersects(RegLocation rl_src, RegLocation rl_dest) { |
Mark Mendell | 4708dcd | 2014-01-22 09:05:18 -0800 | [diff] [blame] | 1251 | DCHECK(rl_src.wide); |
| 1252 | DCHECK(rl_dest.wide); |
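  // Wide values span two adjacent vregs; the locations partially overlap when the low vregs differ by exactly one.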
| 1253 | return (abs(mir_graph_->SRegToVReg(rl_src.s_reg_low) - mir_graph_->SRegToVReg(rl_dest.s_reg_low)) == 1); |
| 1254 | } |
| 1255 | |
Alexei Zavjalov | d8c3e36 | 2014-10-08 15:51:59 +0700 | [diff] [blame] | 1256 | bool Mir2Lir::Intersects(RegLocation rl_src, RegLocation rl_dest) { |
| 1257 | DCHECK(rl_src.wide); |
| 1258 | DCHECK(rl_dest.wide); |
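  // The locations overlap, fully or partially, when the low vregs differ by at most one.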
| 1259 | return (abs(mir_graph_->SRegToVReg(rl_src.s_reg_low) - mir_graph_->SRegToVReg(rl_dest.s_reg_low)) <= 1); |
| 1260 | } |
| 1261 | |
buzbee | 2700f7e | 2014-03-07 09:46:20 -0800 | [diff] [blame] | 1262 | LIR *Mir2Lir::OpCmpMemImmBranch(ConditionCode cond, RegStorage temp_reg, RegStorage base_reg, |
Dave Allison | 69dfe51 | 2014-07-11 17:11:58 +0000 | [diff] [blame] | 1263 | int offset, int check_value, LIR* target, LIR** compare) { |
Mark Mendell | 766e929 | 2014-01-27 07:55:47 -0800 | [diff] [blame] | 1264 | // Handle this for architectures that can't compare to memory. |
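  // Load the value at [base_reg + offset] into temp_reg, then branch on a register-immediate compare.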
Dave Allison | 69dfe51 | 2014-07-11 17:11:58 +0000 | [diff] [blame] | 1265 | LIR* inst = Load32Disp(base_reg, offset, temp_reg); |
| 1266 | if (compare != nullptr) { |
| 1267 | *compare = inst; |
| 1268 | } |
Mark Mendell | 766e929 | 2014-01-27 07:55:47 -0800 | [diff] [blame] | 1269 | LIR* branch = OpCmpImmBranch(cond, temp_reg, check_value, target); |
| 1270 | return branch; |
| 1271 | } |
| 1272 | |
Dave Allison | bcec6fb | 2014-01-17 12:52:22 -0800 | [diff] [blame] | 1273 | void Mir2Lir::AddSlowPath(LIRSlowPath* slowpath) { |
Vladimir Marko | e39c54e | 2014-09-22 14:50:02 +0100 | [diff] [blame] | 1274 | slow_paths_.push_back(slowpath); |
Serguei Katkov | 589e046 | 2014-09-05 18:37:22 +0700 | [diff] [blame] | 1275 | ResetDefTracking(); |
Dave Allison | bcec6fb | 2014-01-17 12:52:22 -0800 | [diff] [blame] | 1276 | } |
Mark Mendell | 55d0eac | 2014-02-06 11:02:52 -0800 | [diff] [blame] | 1277 | |
Jeff Hao | 49161ce | 2014-03-12 11:05:25 -0700 | [diff] [blame] | 1278 | void Mir2Lir::LoadCodeAddress(const MethodReference& target_method, InvokeType type, |
| 1279 | SpecialTargetRegister symbolic_reg) { |
Vladimir Marko | a51a0b0 | 2014-05-21 12:08:39 +0100 | [diff] [blame] | 1280 | LIR* data_target = ScanLiteralPoolMethod(code_literal_list_, target_method); |
Ian Rogers | 6a3c1fc | 2014-10-31 00:33:20 -0700 | [diff] [blame] | 1281 | if (data_target == nullptr) { |
Vladimir Marko | a51a0b0 | 2014-05-21 12:08:39 +0100 | [diff] [blame] | 1282 | data_target = AddWordData(&code_literal_list_, target_method.dex_method_index); |
Jeff Hao | 49161ce | 2014-03-12 11:05:25 -0700 | [diff] [blame] | 1283 | data_target->operands[1] = WrapPointer(const_cast<DexFile*>(target_method.dex_file)); |
Vladimir Marko | a51a0b0 | 2014-05-21 12:08:39 +0100 | [diff] [blame] | 1284 | // NOTE: The invoke type doesn't contribute to the literal identity. In fact, we can have |
| 1285 | // the same method invoked with kVirtual, kSuper and kInterface but the class linker will |
| 1286 | // resolve these invokes to the same method, so we don't care which one we record here. |
Jeff Hao | 49161ce | 2014-03-12 11:05:25 -0700 | [diff] [blame] | 1287 | data_target->operands[2] = type; |
Mark Mendell | 55d0eac | 2014-02-06 11:02:52 -0800 | [diff] [blame] | 1288 | } |
Chao-ying Fu | a77ee51 | 2014-07-01 17:43:41 -0700 | [diff] [blame] | 1289 | // Loads a code pointer. Code from oat file can be mapped anywhere. |
Vladimir Marko | f6737f7 | 2015-03-23 17:05:14 +0000 | [diff] [blame] | 1290 | OpPcRelLoad(TargetPtrReg(symbolic_reg), data_target); |
Mark Mendell | 55d0eac | 2014-02-06 11:02:52 -0800 | [diff] [blame] | 1291 | DCHECK_NE(cu_->instruction_set, kMips) << reinterpret_cast<void*>(data_target); |
Maja Gagic | 6ea651f | 2015-02-24 16:55:04 +0100 | [diff] [blame] | 1292 | DCHECK_NE(cu_->instruction_set, kMips64) << reinterpret_cast<void*>(data_target); |
Mark Mendell | 55d0eac | 2014-02-06 11:02:52 -0800 | [diff] [blame] | 1293 | } |
| 1294 | |
Jeff Hao | 49161ce | 2014-03-12 11:05:25 -0700 | [diff] [blame] | 1295 | void Mir2Lir::LoadMethodAddress(const MethodReference& target_method, InvokeType type, |
| 1296 | SpecialTargetRegister symbolic_reg) { |
Vladimir Marko | a51a0b0 | 2014-05-21 12:08:39 +0100 | [diff] [blame] | 1297 | LIR* data_target = ScanLiteralPoolMethod(method_literal_list_, target_method); |
Ian Rogers | 6a3c1fc | 2014-10-31 00:33:20 -0700 | [diff] [blame] | 1298 | if (data_target == nullptr) { |
Vladimir Marko | a51a0b0 | 2014-05-21 12:08:39 +0100 | [diff] [blame] | 1299 | data_target = AddWordData(&method_literal_list_, target_method.dex_method_index); |
Jeff Hao | 49161ce | 2014-03-12 11:05:25 -0700 | [diff] [blame] | 1300 | data_target->operands[1] = WrapPointer(const_cast<DexFile*>(target_method.dex_file)); |
Vladimir Marko | a51a0b0 | 2014-05-21 12:08:39 +0100 | [diff] [blame] | 1301 | // NOTE: The invoke type doesn't contribute to the literal identity. In fact, we can have |
| 1302 | // the same method invoked with kVirtual, kSuper and kInterface but the class linker will |
| 1303 | // resolve these invokes to the same method, so we don't care which one we record here. |
Jeff Hao | 49161ce | 2014-03-12 11:05:25 -0700 | [diff] [blame] | 1304 | data_target->operands[2] = type; |
Mark Mendell | 55d0eac | 2014-02-06 11:02:52 -0800 | [diff] [blame] | 1305 | } |
Chao-ying Fu | a77ee51 | 2014-07-01 17:43:41 -0700 | [diff] [blame] | 1306 | // Loads an ArtMethod pointer, which is a reference as it lives in the heap. |
Vladimir Marko | f6737f7 | 2015-03-23 17:05:14 +0000 | [diff] [blame] | 1307 | OpPcRelLoad(TargetReg(symbolic_reg, kRef), data_target); |
Mark Mendell | 55d0eac | 2014-02-06 11:02:52 -0800 | [diff] [blame] | 1308 | DCHECK_NE(cu_->instruction_set, kMips) << reinterpret_cast<void*>(data_target); |
Maja Gagic | 6ea651f | 2015-02-24 16:55:04 +0100 | [diff] [blame] | 1309 | DCHECK_NE(cu_->instruction_set, kMips64) << reinterpret_cast<void*>(data_target); |
Mark Mendell | 55d0eac | 2014-02-06 11:02:52 -0800 | [diff] [blame] | 1310 | } |
| 1311 | |
Fred Shih | e7f82e2 | 2014-08-06 10:46:37 -0700 | [diff] [blame] | 1312 | void Mir2Lir::LoadClassType(const DexFile& dex_file, uint32_t type_idx, |
| 1313 | SpecialTargetRegister symbolic_reg) { |
Mark Mendell | 55d0eac | 2014-02-06 11:02:52 -0800 | [diff] [blame] | 1314 | // Use the literal pool and a PC-relative load from a data word. |
Fred Shih | e7f82e2 | 2014-08-06 10:46:37 -0700 | [diff] [blame] | 1315 | LIR* data_target = ScanLiteralPoolClass(class_literal_list_, dex_file, type_idx); |
Mark Mendell | 55d0eac | 2014-02-06 11:02:52 -0800 | [diff] [blame] | 1316 | if (data_target == nullptr) { |
| 1317 | data_target = AddWordData(&class_literal_list_, type_idx); |
Fred Shih | 4fc7853 | 2014-08-06 16:44:22 -0700 | [diff] [blame] | 1318 | data_target->operands[1] = WrapPointer(const_cast<DexFile*>(&dex_file)); |
Mark Mendell | 55d0eac | 2014-02-06 11:02:52 -0800 | [diff] [blame] | 1319 | } |
Chao-ying Fu | a77ee51 | 2014-07-01 17:43:41 -0700 | [diff] [blame] | 1320 | // Loads a Class pointer, which is a reference as it lives in the heap. |
Vladimir Marko | f6737f7 | 2015-03-23 17:05:14 +0000 | [diff] [blame] | 1321 | OpPcRelLoad(TargetReg(symbolic_reg, kRef), data_target); |
Mark Mendell | 55d0eac | 2014-02-06 11:02:52 -0800 | [diff] [blame] | 1322 | } |
| 1323 | |
Vladimir Marko | 20f8559 | 2015-03-19 10:07:02 +0000 | [diff] [blame] | 1324 | bool Mir2Lir::CanUseOpPcRelDexCacheArrayLoad() const { |
| 1325 | return false; |
| 1326 | } |
| 1327 | |
| 1328 | void Mir2Lir::OpPcRelDexCacheArrayLoad(const DexFile* dex_file ATTRIBUTE_UNUSED, |
| 1329 | int offset ATTRIBUTE_UNUSED, |
| 1330 | RegStorage r_dest ATTRIBUTE_UNUSED) { |
| 1331 | LOG(FATAL) << "No generic implementation."; |
| 1332 | UNREACHABLE(); |
| 1333 | } |
| 1334 | |
Tong Shen | 547cdfd | 2014-08-05 01:54:19 -0700 | [diff] [blame] | 1335 | std::vector<uint8_t>* Mir2Lir::ReturnFrameDescriptionEntry() { |
Mark Mendell | ae9fd93 | 2014-02-10 16:14:35 -0800 | [diff] [blame] | 1336 | // Default case is to do nothing. |
| 1337 | return nullptr; |
| 1338 | } |
| 1339 | |
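// Narrow a wide location to its 32-bit view: for register pairs keep the low register, for 64-bit
// solo registers switch to the 32-bit view and transfer liveness to it.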
buzbee | 2700f7e | 2014-03-07 09:46:20 -0800 | [diff] [blame] | 1340 | RegLocation Mir2Lir::NarrowRegLoc(RegLocation loc) { |
buzbee | 091cc40 | 2014-03-31 10:14:40 -0700 | [diff] [blame] | 1341 | if (loc.location == kLocPhysReg) { |
buzbee | 85089dd | 2014-05-25 15:10:52 -0700 | [diff] [blame] | 1342 | DCHECK(!loc.reg.Is32Bit()); |
buzbee | 091cc40 | 2014-03-31 10:14:40 -0700 | [diff] [blame] | 1343 | if (loc.reg.IsPair()) { |
buzbee | 85089dd | 2014-05-25 15:10:52 -0700 | [diff] [blame] | 1344 | RegisterInfo* info_lo = GetRegInfo(loc.reg.GetLow()); |
| 1345 | RegisterInfo* info_hi = GetRegInfo(loc.reg.GetHigh()); |
| 1346 | info_lo->SetIsWide(false); |
| 1347 | info_hi->SetIsWide(false); |
| 1348 | loc.reg = info_lo->GetReg(); |
buzbee | 091cc40 | 2014-03-31 10:14:40 -0700 | [diff] [blame] | 1349 | } else { |
buzbee | 85089dd | 2014-05-25 15:10:52 -0700 | [diff] [blame] | 1350 | RegisterInfo* info = GetRegInfo(loc.reg); |
| 1351 | RegisterInfo* info_new = info->FindMatchingView(RegisterInfo::k32SoloStorageMask); |
| 1352 | DCHECK(info_new != nullptr); |
| 1353 | if (info->IsLive() && (info->SReg() == loc.s_reg_low)) { |
| 1354 | info->MarkDead(); |
| 1355 | info_new->MarkLive(loc.s_reg_low); |
| 1356 | } |
| 1357 | loc.reg = info_new->GetReg(); |
buzbee | 091cc40 | 2014-03-31 10:14:40 -0700 | [diff] [blame] | 1358 | } |
buzbee | 85089dd | 2014-05-25 15:10:52 -0700 | [diff] [blame] | 1359 | DCHECK(loc.reg.Valid()); |
buzbee | 2700f7e | 2014-03-07 09:46:20 -0800 | [diff] [blame] | 1360 | } |
buzbee | 85089dd | 2014-05-25 15:10:52 -0700 | [diff] [blame] | 1361 | loc.wide = false; |
buzbee | 2700f7e | 2014-03-07 09:46:20 -0800 | [diff] [blame] | 1362 | return loc; |
| 1363 | } |
| 1364 | |
Mark Mendell | d65c51a | 2014-04-29 16:55:20 -0400 | [diff] [blame] | 1365 | void Mir2Lir::GenMachineSpecificExtendedMethodMIR(BasicBlock* bb, MIR* mir) { |
Ian Rogers | 6a3c1fc | 2014-10-31 00:33:20 -0700 | [diff] [blame] | 1366 | UNUSED(bb, mir); |
Mark Mendell | d65c51a | 2014-04-29 16:55:20 -0400 | [diff] [blame] | 1367 | LOG(FATAL) << "Unknown MIR opcode not supported on this architecture"; |
Ian Rogers | 6a3c1fc | 2014-10-31 00:33:20 -0700 | [diff] [blame] | 1368 | UNREACHABLE(); |
Mark Mendell | d65c51a | 2014-04-29 16:55:20 -0400 | [diff] [blame] | 1369 | } |
| 1370 | |
Vladimir Marko | 767c752 | 2015-03-20 12:47:30 +0000 | [diff] [blame] | 1371 | void Mir2Lir::InitReferenceVRegs(BasicBlock* bb, BitVector* references) { |
| 1372 | // Mark the references coming from the first predecessor. |
| 1373 | DCHECK(bb != nullptr); |
| 1374 | DCHECK(bb->block_type == kEntryBlock || !bb->predecessors.empty()); |
| 1375 | BasicBlock* first_bb = |
| 1376 | (bb->block_type == kEntryBlock) ? bb : mir_graph_->GetBasicBlock(bb->predecessors[0]); |
| 1377 | DCHECK(first_bb != nullptr); |
| 1378 | DCHECK(first_bb->data_flow_info != nullptr); |
| 1379 | DCHECK(first_bb->data_flow_info->vreg_to_ssa_map_exit != nullptr); |
| 1380 | const int32_t* first_vreg_to_ssa_map = first_bb->data_flow_info->vreg_to_ssa_map_exit; |
| 1381 | references->ClearAllBits(); |
| 1382 | for (uint32_t vreg = 0, num_vregs = mir_graph_->GetNumOfCodeVRs(); vreg != num_vregs; ++vreg) { |
| 1383 | int32_t sreg = first_vreg_to_ssa_map[vreg]; |
| 1384 | if (sreg != INVALID_SREG && mir_graph_->reg_location_[sreg].ref && |
| 1385 | !mir_graph_->IsConstantNullRef(mir_graph_->reg_location_[sreg])) { |
| 1386 | references->SetBit(vreg); |
| 1387 | } |
| 1388 | } |
| 1389 | // Unmark the references that are merging with a different value. |
| 1390 | for (size_t i = 1u, num_pred = bb->predecessors.size(); i < num_pred; ++i) { |
| 1391 | BasicBlock* pred_bb = mir_graph_->GetBasicBlock(bb->predecessors[i]); |
| 1392 | DCHECK(pred_bb != nullptr); |
| 1393 | DCHECK(pred_bb->data_flow_info != nullptr); |
| 1394 | DCHECK(pred_bb->data_flow_info->vreg_to_ssa_map_exit != nullptr); |
| 1395 | const int32_t* pred_vreg_to_ssa_map = pred_bb->data_flow_info->vreg_to_ssa_map_exit; |
| 1396 | for (uint32_t vreg : references->Indexes()) { |
| 1397 | if (first_vreg_to_ssa_map[vreg] != pred_vreg_to_ssa_map[vreg]) { |
| 1398 | // NOTE: The BitVectorSet::IndexIterator will not check the pointed-to bit again, |
| 1399 | // so clearing the bit has no effect on the iterator. |
| 1400 | references->ClearBit(vreg); |
| 1401 | } |
| 1402 | } |
| 1403 | } |
| 1404 | if (bb->block_type != kEntryBlock && bb->first_mir_insn != nullptr && |
| 1405 | static_cast<int>(bb->first_mir_insn->dalvikInsn.opcode) == kMirOpCheckPart2) { |
| 1406 | // In Mir2Lir::MethodBlockCodeGen() we have artificially moved the throwing |
| 1407 | // instruction to the previous block. However, the MIRGraph data used above |
| 1408 | // doesn't reflect that, so we still need to process that MIR insn here. |
Pavel Vyssotski | 356a181 | 2015-03-27 15:23:02 +0600 | [diff] [blame] | 1409 | MIR* mir = nullptr; |
| 1410 | BasicBlock* pred_bb = bb; |
| 1411 | // Traverse empty blocks. |
| 1412 | while (mir == nullptr && pred_bb->predecessors.size() == 1u) { |
| 1413 |       pred_bb = mir_graph_->GetBasicBlock(pred_bb->predecessors[0]);
| 1414 | DCHECK(pred_bb != nullptr); |
| 1415 | mir = pred_bb->last_mir_insn; |
| 1416 | } |
| 1417 | DCHECK(mir != nullptr); |
| 1418 | UpdateReferenceVRegsLocal(nullptr, mir, references); |
Vladimir Marko | 767c752 | 2015-03-20 12:47:30 +0000 | [diff] [blame] | 1419 | } |
| 1420 | } |
| 1421 | |
| 1422 | bool Mir2Lir::UpdateReferenceVRegsLocal(MIR* mir, MIR* prev_mir, BitVector* references) { |
| 1423 | DCHECK(mir == nullptr || mir->bb == prev_mir->bb); |
| 1424 | DCHECK(prev_mir != nullptr); |
| 1425 | while (prev_mir != nullptr) { |
| 1426 | if (prev_mir == mir) { |
| 1427 | return true; |
| 1428 | } |
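    // A MIR with a single reference def (that is not a null constant) marks its vreg as a reference;
    // any other def pattern clears the bits for all vregs it defines.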
| 1429 | const size_t num_defs = prev_mir->ssa_rep->num_defs; |
| 1430 | const int32_t* defs = prev_mir->ssa_rep->defs; |
| 1431 | if (num_defs == 1u && mir_graph_->reg_location_[defs[0]].ref && |
| 1432 | !mir_graph_->IsConstantNullRef(mir_graph_->reg_location_[defs[0]])) { |
| 1433 | references->SetBit(mir_graph_->SRegToVReg(defs[0])); |
| 1434 | } else { |
| 1435 | for (size_t i = 0u; i != num_defs; ++i) { |
| 1436 | references->ClearBit(mir_graph_->SRegToVReg(defs[i])); |
| 1437 | } |
| 1438 | } |
| 1439 | prev_mir = prev_mir->next; |
| 1440 | } |
| 1441 | return false; |
| 1442 | } |
| 1443 | |
| 1444 | void Mir2Lir::UpdateReferenceVRegs(MIR* mir, MIR* prev_mir, BitVector* references) { |
| 1445 | if (mir == nullptr) { |
| 1446 | // Safepoint in entry sequence. |
| 1447 | InitReferenceVRegs(mir_graph_->GetEntryBlock(), references); |
| 1448 | return; |
| 1449 | } |
| 1450 | if (IsInstructionReturn(mir->dalvikInsn.opcode) || |
| 1451 | mir->dalvikInsn.opcode == Instruction::RETURN_VOID_NO_BARRIER) { |
| 1452 | references->ClearAllBits(); |
| 1453 | if (mir->dalvikInsn.opcode == Instruction::RETURN_OBJECT) { |
| 1454 | references->SetBit(mir_graph_->SRegToVReg(mir->ssa_rep->uses[0])); |
| 1455 | } |
| 1456 | return; |
| 1457 | } |
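  // Update incrementally from the previous safepoint when both are in the same basic block;
  // otherwise recompute the references from the block's entry state.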
| 1458 | if (prev_mir != nullptr && mir->bb == prev_mir->bb && |
| 1459 | UpdateReferenceVRegsLocal(mir, prev_mir, references)) { |
| 1460 | return; |
| 1461 | } |
| 1462 | BasicBlock* bb = mir_graph_->GetBasicBlock(mir->bb); |
| 1463 | DCHECK(bb != nullptr); |
| 1464 | InitReferenceVRegs(bb, references); |
| 1465 | bool success = UpdateReferenceVRegsLocal(mir, bb->first_mir_insn, references); |
| 1466 | DCHECK(success) << "MIR @0x" << std::hex << mir->offset << " not in BB#" << std::dec << mir->bb; |
| 1467 | } |
| 1468 | |
Brian Carlstrom | 7934ac2 | 2013-07-26 10:54:15 -0700 | [diff] [blame] | 1469 | } // namespace art |